[ 774.170147] env[61914]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 774.814807] env[61964]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 776.165645] env[61964]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61964) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 776.166088] env[61964]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61964) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 776.166088] env[61964]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61964) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 776.166371] env[61964]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 776.363728] env[61964]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61964) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:384}}
[ 776.374087] env[61964]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61964) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:422}}
[ 776.479037] env[61964]: INFO nova.virt.driver [None req-5b12946b-1198-47e8-a1ca-9b6202a9d1a6 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 776.552892] env[61964]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 776.553130] env[61964]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 776.553197] env[61964]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61964) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 779.584239] env[61964]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-6deca050-5ff8-4090-8cb9-4db0d86780a5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.601682] env[61964]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61964) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 779.601861] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-6e339c74-0428-4fdf-b892-e33c3e906a14 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.670056] env[61964]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 7806b.
[ 779.670230] env[61964]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.117s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 779.670743] env[61964]: INFO nova.virt.vmwareapi.driver [None req-5b12946b-1198-47e8-a1ca-9b6202a9d1a6 None None] VMware vCenter version: 7.0.3
[ 779.674114] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9f143d-4e18-47fc-bcf7-4be40d642f44 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.695806] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fbcfb2-e2d6-48e7-bf2f-9100165ac0ec {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.701975] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73fc033-29ec-4eb7-9f80-4889e6dfc984 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.708641] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f884517-c8fe-452c-ac66-0104b8f664d2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.721625] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78d116c-2e7b-45fd-9721-763f2fb0effc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.727606] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd4ad20-01da-4c8b-9514-e51befb39ffe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.758018] env[61964]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-005a659b-266f-4cbe-8b61-63d43846b16d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 779.763427] env[61964]: DEBUG nova.virt.vmwareapi.driver [None req-5b12946b-1198-47e8-a1ca-9b6202a9d1a6 None None] Extension org.openstack.compute already exists. {{(pid=61964) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 779.766123] env[61964]: INFO nova.compute.provider_config [None req-5b12946b-1198-47e8-a1ca-9b6202a9d1a6 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 779.787354] env[61964]: DEBUG nova.context [None req-5b12946b-1198-47e8-a1ca-9b6202a9d1a6 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),93ce21d0-bd2e-45bc-974a-86d8002bfac8(cell1) {{(pid=61964) load_cells /opt/stack/nova/nova/context.py:464}}
[ 779.789536] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 779.789762] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 779.790500] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 779.790920] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Acquiring lock "93ce21d0-bd2e-45bc-974a-86d8002bfac8" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 779.791126] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Lock "93ce21d0-bd2e-45bc-974a-86d8002bfac8" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 779.792091] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Lock "93ce21d0-bd2e-45bc-974a-86d8002bfac8" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 779.817013] env[61964]: INFO dbcounter [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Registered counter for database nova_cell0
[ 779.825434] env[61964]: INFO dbcounter [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Registered counter for database nova_cell1
[ 779.828565] env[61964]: DEBUG oslo_db.sqlalchemy.engines [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61964) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 779.829184] env[61964]: DEBUG oslo_db.sqlalchemy.engines [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61964) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 779.833478] env[61964]: DEBUG dbcounter [-] [61964] Writer thread running {{(pid=61964) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 779.836316] env[61964]: ERROR nova.db.main.api [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 779.836316] env[61964]: result = function(*args, **kwargs)
[ 779.836316] env[61964]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 779.836316] env[61964]: return func(*args, **kwargs)
[ 779.836316] env[61964]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 779.836316] env[61964]: result = fn(*args, **kwargs)
[ 779.836316] env[61964]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 779.836316] env[61964]: return f(*args, **kwargs)
[ 779.836316] env[61964]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 779.836316] env[61964]: return db.service_get_minimum_version(context, binaries)
[ 779.836316] env[61964]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 779.836316] env[61964]: _check_db_access()
[ 779.836316] env[61964]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 779.836316] env[61964]: stacktrace = ''.join(traceback.format_stack())
[ 779.836316] env[61964]:
[ 779.836786] env[61964]: DEBUG dbcounter [-] [61964] Writer thread running {{(pid=61964) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 779.837611] env[61964]: ERROR nova.db.main.api [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 779.837611] env[61964]: result = function(*args, **kwargs)
[ 779.837611] env[61964]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 779.837611] env[61964]: return func(*args, **kwargs)
[ 779.837611] env[61964]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 779.837611] env[61964]: result = fn(*args, **kwargs)
[ 779.837611] env[61964]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 779.837611] env[61964]: return f(*args, **kwargs)
[ 779.837611] env[61964]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 779.837611] env[61964]: return db.service_get_minimum_version(context, binaries)
[ 779.837611] env[61964]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 779.837611] env[61964]: _check_db_access()
[ 779.837611] env[61964]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 779.837611] env[61964]: stacktrace = ''.join(traceback.format_stack())
[ 779.837611] env[61964]:
[ 779.838067] env[61964]: WARNING nova.objects.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Failed to get minimum service version for cell 93ce21d0-bd2e-45bc-974a-86d8002bfac8
[ 779.838213] env[61964]: WARNING nova.objects.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 779.838652] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Acquiring lock "singleton_lock" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 779.838835] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Acquired lock "singleton_lock" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 779.839091] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Releasing lock "singleton_lock" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 779.839418] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Full set of CONF: {{(pid=61964) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:362}}
[ 779.839561] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ******************************************************************************** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2589}}
[ 779.839803] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] Configuration options gathered from: {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2590}}
[ 779.839958] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2591}}
[ 779.840165] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2592}}
[ 779.840293] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ================================================================================ {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2594}}
[ 779.840497] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] allow_resize_to_same_host = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 779.840665] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] arq_binding_timeout = 300 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 779.840798] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] backdoor_port = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 779.840924] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] backdoor_socket = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 779.841102] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] block_device_allocate_retries = 60 {{(pid=61964) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.841272] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] block_device_allocate_retries_interval = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.841440] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cert = self.pem {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.841603] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.841767] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute_monitors = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.841934] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] config_dir = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.842114] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] config_drive_format = iso9660 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.842250] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.842415] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] config_source = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.842578] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] console_host = devstack {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.842802] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] control_exchange = nova {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.842970] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cpu_allocation_ratio = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.843148] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] daemon = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.843316] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] debug = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.843473] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] default_access_ip_network_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.843636] 
env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] default_availability_zone = nova {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.843791] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] default_ephemeral_format = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.843952] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] default_green_pool_size = 1000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.844205] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.844366] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] default_schedule_zone = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.844523] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] disk_allocation_ratio = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.844804] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] enable_new_services = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.845028] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] enabled_apis = ['osapi_compute'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.845201] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] enabled_ssl_apis = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.845366] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] flat_injected = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.845524] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] force_config_drive = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.845705] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] force_raw_images = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.845893] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 
None None] graceful_shutdown_timeout = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.846065] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] heal_instance_info_cache_interval = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.846287] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] host = cpu-1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.846458] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.846626] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.846783] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.846995] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.847173] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] instance_build_timeout = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.847331] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] instance_delete_interval = 300 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.847496] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] instance_format = [instance: %(uuid)s] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.847680] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] instance_name_template = instance-%08x {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.847850] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] instance_usage_audit = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.848029] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] instance_usage_audit_period = month {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.848202] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.848367] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.848533] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] internal_service_availability_zone = internal {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.848754] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] key = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.848925] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] live_migration_retry_count = 30 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.849103] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_config_append = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.849275] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.849441] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_dir = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.849599] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.849728] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_options = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.849892] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_rotate_interval = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.850081] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_rotate_interval_type = days {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.850254] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] log_rotation_type = none {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.850385] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.850511] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.850680] env[61964]: DEBUG 
oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.850846] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.850973] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.851149] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] long_rpc_timeout = 1800 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.851307] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] max_concurrent_builds = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.851463] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] max_concurrent_live_migrations = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.851618] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] max_concurrent_snapshots = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.851774] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] max_local_block_devices = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.851931] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] max_logfile_count = 30 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.852098] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] max_logfile_size_mb = 200 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.852255] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] maximum_instance_delete_attempts = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.852421] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] metadata_listen = 0.0.0.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.852588] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] metadata_listen_port = 8775 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.852756] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] metadata_workers = 2 {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.852918] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] migrate_max_retries = -1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.853096] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] mkisofs_cmd = genisoimage {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.853304] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.853435] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] my_ip = 10.180.1.21 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.853596] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] network_allocate_retries = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.853769] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.853936] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.854111] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] osapi_compute_listen_port = 8774 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.854276] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] osapi_compute_unique_server_name_scope = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.854439] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] osapi_compute_workers = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.854596] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] password_length = 12 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.854755] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] periodic_enable = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.854911] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] periodic_fuzzy_delay = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.855088] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] pointer_model = usbtablet {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.855256] env[61964]: 
DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] preallocate_images = none {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.855412] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] publish_errors = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.855540] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] pybasedir = /opt/stack/nova {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.855762] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ram_allocation_ratio = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.855940] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rate_limit_burst = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.856121] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rate_limit_except_level = CRITICAL {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.856280] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rate_limit_interval = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.856442] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] reboot_timeout = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.856600] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] reclaim_instance_interval = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.856758] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] record = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.856927] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] reimage_timeout_per_gb = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.857104] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] report_interval = 120 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.857267] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rescue_timeout = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.857425] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] reserved_host_cpus = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.857583] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] reserved_host_disk_mb = 0 {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.857888] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] reserved_host_memory_mb = 512 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.858071] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] reserved_huge_pages = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.858238] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] resize_confirm_window = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.858397] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] resize_fs_using_block_device = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.858554] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] resume_guests_state_on_host_boot = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.858723] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.858884] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rpc_response_timeout = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.859053] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] run_external_periodic_tasks = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.859225] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] running_deleted_instance_action = reap {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.859384] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.859539] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] running_deleted_instance_timeout = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.859695] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler_instance_sync_interval = 120 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.859918] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_down_time = 720 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.860148] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] servicegroup_driver = db {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.860320] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] shelved_offload_time = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.860481] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] shelved_poll_interval = 3600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.860648] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] shutdown_timeout = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.860816] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] source_is_ipv6 = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.860976] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ssl_only = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.861232] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.861400] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] sync_power_state_interval = 600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.861560] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] sync_power_state_pool_size = 1000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.861729] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] syslog_log_facility = LOG_USER {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.861888] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] tempdir = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.862056] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] timeout_nbd = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.862229] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] transport_url = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.862390] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] update_resources_interval = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.862549] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] use_cow_images = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.862711] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 
None None] use_eventlog = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.862870] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] use_journal = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.863037] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] use_json = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.863199] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] use_rootwrap_daemon = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.863356] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] use_stderr = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.863514] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] use_syslog = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.863670] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vcpu_pin_set = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.863838] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plugging_is_fatal = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.864011] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plugging_timeout = 300 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.864184] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] virt_mkfs = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.864345] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] volume_usage_poll_interval = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.864505] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] watch_log_file = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.864673] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] web = /usr/share/spice-html5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 779.864861] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_concurrency.disable_process_locking = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.865167] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.865347] 
env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.865514] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.865696] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.865873] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.866051] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.866238] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.auth_strategy = keystone {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.866403] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.compute_link_prefix = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.866576] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.866807] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.dhcp_domain = novalocal {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.866982] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.enable_instance_password = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.867158] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.glance_link_prefix = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.867321] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.867489] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.867676] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] 
api.instance_list_per_project_cells = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.867847] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.list_records_by_skipping_down_cells = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.868021] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.local_metadata_per_cell = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.868191] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.max_limit = 1000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.868355] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.metadata_cache_expiration = 15 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.868593] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.neutron_default_tenant_id = default {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.868835] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.use_forwarded_for = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.869026] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.use_neutron_default_nets = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.869191] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.869355] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.869520] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.869692] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.869864] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.vendordata_dynamic_targets = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.870067] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api.vendordata_jsonfile_path = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.870267] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] 
api.vendordata_providers = ['StaticJSON'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.870461] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.backend = dogpile.cache.memcached {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.870632] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.backend_argument = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.870803] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.config_prefix = cache.oslo {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.870995] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.dead_timeout = 60.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.871239] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.debug_cache_backend = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.871415] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.enable_retry_client = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.871583] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.enable_socket_keepalive = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.871758] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.enabled = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.871928] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.expiration_time = 600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.872187] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.hashclient_retry_attempts = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.872365] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.872529] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_dead_retry = 300 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.872699] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_password = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.872864] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.873040] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.873211] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_pool_maxsize = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.873373] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.873536] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_sasl_enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.873716] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.873884] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.874064] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.memcache_username = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.874234] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.proxies = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.874397] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.retry_attempts = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.874560] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.retry_delay = 0.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.874725] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.socket_keepalive_count = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.874886] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.socket_keepalive_idle = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.875057] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.socket_keepalive_interval = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.875219] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.tls_allowed_ciphers = None {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.875375] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.tls_cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.875532] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.tls_certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.875691] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.tls_enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.875848] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cache.tls_keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.876030] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.876213] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.auth_type = password {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.876374] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.876550] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.876711] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.876875] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.877053] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.cross_az_attach = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.877221] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.debug = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.877382] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.endpoint_template = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.877546] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.http_retries = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.877735] env[61964]: DEBUG oslo_service.service [None 
req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.877910] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.878098] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.os_region_name = RegionOne {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.878268] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.878430] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cinder.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.878609] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.878881] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.cpu_dedicated_set = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.879081] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.cpu_shared_set = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.879257] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.image_type_exclude_list = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.879424] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.879589] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.879751] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.879916] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.880118] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.880294] env[61964]: DEBUG oslo_service.service 
[None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.resource_provider_association_refresh = 300 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.880459] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.shutdown_retry_interval = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.880735] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.880947] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] conductor.workers = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.881148] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] console.allowed_origins = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.881314] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] console.ssl_ciphers = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.881486] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] console.ssl_minimum_version = default {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.881661] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] consoleauth.token_ttl = 600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.881837] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.882103] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.882386] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.882662] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.connect_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.882947] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.connect_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.883245] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.endpoint_override = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.883534] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] 
cyborg.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.883759] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.883943] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.max_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.884127] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.min_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.884388] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.region_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.884469] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.service_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.884637] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.service_type = accelerator {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.884802] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.884964] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.status_code_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.885136] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.status_code_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.885700] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.885700] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.885700] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] cyborg.version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.885810] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.backend = sqlalchemy {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.885986] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.connection = **** {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.886173] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.connection_debug = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.886355] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.connection_parameters = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.886519] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.connection_recycle_time = 3600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.886685] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.connection_trace = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.886849] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.db_inc_retry_interval = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.887019] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.db_max_retries = 20 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.887183] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.db_max_retry_interval = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.887342] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.db_retry_interval = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.887509] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.max_overflow = 50 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.887684] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.max_pool_size = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.887873] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.max_retries = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.888056] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.888222] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.mysql_wsrep_sync_wait = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.888383] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.pool_timeout = None {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.888548] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.retry_interval = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.888722] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.slave_connection = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.888921] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.sqlite_synchronous = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.889102] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] database.use_db_reconnect = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.889289] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.backend = sqlalchemy {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.889465] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.connection = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.889630] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.connection_debug = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.889797] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.connection_parameters = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.889960] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.connection_recycle_time = 3600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.890139] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.connection_trace = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.890301] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.db_inc_retry_interval = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.890536] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.db_max_retries = 20 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.890620] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.db_max_retry_interval = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.890775] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.db_retry_interval = 1 {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.890941] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.max_overflow = 50 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.891109] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.max_pool_size = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.891277] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.max_retries = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.891442] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.891600] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.891761] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.pool_timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.891927] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.retry_interval = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.892096] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.slave_connection = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.892260] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] api_database.sqlite_synchronous = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.892430] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] devices.enabled_mdev_types = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.892603] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.892765] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ephemeral_storage_encryption.enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.892934] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.893117] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.api_servers = None {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.893282] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.893442] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.893605] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.893761] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.connect_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.893933] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.connect_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.894137] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.debug = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.894307] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.default_trusted_certificate_ids = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.894469] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.enable_certificate_validation = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.894627] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.enable_rbd_download = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.894783] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.endpoint_override = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.894947] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.895119] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.895279] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.max_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.895435] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.min_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.895598] env[61964]: DEBUG 
oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.num_retries = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.895762] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.rbd_ceph_conf = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.895924] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.rbd_connect_timeout = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.896103] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.rbd_pool = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.896273] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.rbd_user = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.896433] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.region_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.896589] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.service_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.896756] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.service_type = image {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.896916] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.897082] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.status_code_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.897244] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.status_code_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.897400] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.897578] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.897762] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.verify_glance_signatures = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.897931] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] glance.version = None {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.898112] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] guestfs.debug = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.898286] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.config_drive_cdrom = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.898448] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.config_drive_inject_password = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.898613] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.dynamic_memory_ratio = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.898802] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.enable_instance_metrics_collection = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.898968] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.enable_remotefx = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.899151] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.instances_path_share = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.899314] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.iscsi_initiator_list = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.899476] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.limit_cpu_features = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.899642] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.mounted_disk_query_retry_count = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.899807] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.mounted_disk_query_retry_interval = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.899972] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.power_state_check_timeframe = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.900151] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.power_state_event_polling_interval = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.900322] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.qemu_img_cmd = qemu-img.exe {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.900486] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.use_multipath_io = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.900648] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.volume_attach_retry_count = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.900811] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.volume_attach_retry_interval = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.900969] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.vswitch_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.901143] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] hyperv.wait_soft_reboot_seconds = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.901311] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] mks.enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.901670] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.901861] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] image_cache.manager_interval = 2400 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.902041] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] image_cache.precache_concurrency = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.902216] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] image_cache.remove_unused_base_images = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.902385] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.902552] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.902730] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] image_cache.subdirectory_name = _base {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.902903] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.api_max_retries 
= 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.903076] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.api_retry_interval = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.903240] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.903402] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.auth_type = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.903561] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.903719] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.903883] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.904826] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.conductor_group = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.904826] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.connect_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.904826] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.connect_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.904826] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.endpoint_override = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.904826] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.905054] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.905054] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.max_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.905214] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.min_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.905338] env[61964]: DEBUG 
oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.peer_list = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.905499] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.region_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.905665] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.serial_console_state_timeout = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.905831] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.service_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.906024] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.service_type = baremetal {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.906216] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.906381] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.status_code_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.906542] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.status_code_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.906704] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.906886] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.907063] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ironic.version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.907253] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.907425] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] key_manager.fixed_key = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.907608] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.907804] env[61964]: DEBUG oslo_service.service [None 
req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.barbican_api_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.907970] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.barbican_endpoint = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.908159] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.barbican_endpoint_type = public {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.908320] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.barbican_region_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.908479] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.908637] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.908815] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.908991] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.909167] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.909329] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.number_of_retries = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.909490] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.retry_delay = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.909652] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.send_service_user_token = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.909834] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.909965] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.910150] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.verify_ssl = True {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.910290] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican.verify_ssl_path = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.910458] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.910620] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.auth_type = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.910781] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.910942] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.911122] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.911285] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.911442] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.911605] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.911764] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] barbican_service_user.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.911934] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.approle_role_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.912107] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.approle_secret_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.912269] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.912422] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.certfile = None {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.912581] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.912742] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.912899] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.913080] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.kv_mountpoint = secret {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.913243] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.kv_path = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.913406] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.kv_version = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.913563] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.namespace = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.913720] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.root_token_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.913882] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.914107] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.ssl_ca_crt_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.914317] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.914489] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.use_ssl = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.914662] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.914837] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.915008] env[61964]: DEBUG oslo_service.service [None 
req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.auth_type = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.915176] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.915336] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.915501] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.915669] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.connect_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.915845] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.connect_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.916016] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.endpoint_override = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.916186] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.916340] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.916499] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.max_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.916660] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.min_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.916893] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.region_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.917086] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.service_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.917266] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.service_type = identity {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.917431] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.split_loggers = False {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.917589] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.status_code_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.917770] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.status_code_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.917936] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.918132] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.918294] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] keystone.version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.918493] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.connection_uri = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.918655] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.cpu_mode = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.918823] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.918989] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.cpu_models = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.919173] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.cpu_power_governor_high = performance {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.919342] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.919501] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.cpu_power_management = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.919671] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.919835] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.device_detach_attempts = 8 {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.919999] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.device_detach_timeout = 20 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.920177] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.disk_cachemodes = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.920338] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.disk_prefix = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.920504] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.enabled_perf_events = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.920678] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.file_backed_memory = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.920845] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.gid_maps = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.921008] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.hw_disk_discard = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.921173] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.hw_machine_type = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.921342] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.images_rbd_ceph_conf = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.921506] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.921674] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.921844] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.images_rbd_glance_store_name = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.922019] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.images_rbd_pool = rbd {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.922190] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.images_type = default {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.922346] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.images_volume_group = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.922506] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.inject_key = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.922667] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.inject_partition = -2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.922827] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.inject_password = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.922986] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.iscsi_iface = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.923161] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.iser_use_multipath = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.923322] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.923482] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.923642] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_downtime = 500 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.923801] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.923965] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.924141] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_inbound_addr = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.924304] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.924464] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.924621] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_scheme = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.924790] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_timeout_action = abort {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.924949] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_tunnelled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.925118] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_uri = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.925278] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.live_migration_with_native_tls = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.925434] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.max_queues = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.925595] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.925749] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.nfs_mount_options = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.926068] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.926246] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.926409] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.926566] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.926727] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.926890] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.num_pcie_ports = 0 
{{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.927065] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.927230] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.pmem_namespaces = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.927386] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.quobyte_client_cfg = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.927674] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.927856] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.928035] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.928202] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.928360] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rbd_secret_uuid = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.928518] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rbd_user = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.928678] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.928892] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.929078] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rescue_image_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.929243] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rescue_kernel_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.929400] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rescue_ramdisk_id = None {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.929568] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.929728] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.rx_queue_size = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.929895] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.smbfs_mount_options = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.930182] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.930351] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.snapshot_compression = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.930511] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.snapshot_image_format = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.930754] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.930889] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.sparse_logical_volumes = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.931063] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.swtpm_enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.931233] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.swtpm_group = tss {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.931397] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.swtpm_user = tss {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.931562] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.sysinfo_serial = unique {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.931718] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.tb_cache_size = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.931878] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.tx_queue_size = None {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.932053] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.uid_maps = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.932220] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.use_virtio_for_bridges = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.932389] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.virt_type = kvm {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.932558] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.volume_clear = zero {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.932721] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.volume_clear_size = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.932887] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.volume_use_multipath = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.933055] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.vzstorage_cache_path = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.933227] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.933392] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.933556] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.933721] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.933997] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.934188] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.vzstorage_mount_user = stack {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.934351] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.934524] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.934697] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.auth_type = password {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.934859] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.935027] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.935196] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.935356] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.connect_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.935514] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.connect_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.935685] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.default_floating_pool = public {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.935847] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.endpoint_override = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.936015] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.extension_sync_interval = 600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.936191] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.http_retries = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.936352] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.936510] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.936670] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.max_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.936842] env[61964]: 
DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.937009] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.min_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.937188] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.ovs_bridge = br-int {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.937353] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.physnets = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.937521] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.region_name = RegionOne {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.937708] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.service_metadata_proxy = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.937878] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.service_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.938059] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.service_type = network {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.938227] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.938385] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.status_code_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.938542] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.status_code_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.938702] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.938905] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.939085] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] neutron.version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.939262] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None 
None] notifications.bdms_in_notifications = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.939443] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] notifications.default_level = INFO {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.939618] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] notifications.notification_format = unversioned {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.939783] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] notifications.notify_on_state_change = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.939969] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.940160] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] pci.alias = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.940330] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] pci.device_spec = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.940493] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] pci.report_in_placement = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.940664] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.940869] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.auth_type = password {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.941074] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.941244] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.941406] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.941570] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.941733] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] 
placement.connect_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.941896] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.connect_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.942071] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.default_domain_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.942232] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.default_domain_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.942391] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.domain_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.942550] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.domain_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.942707] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.endpoint_override = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.942871] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.943039] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.943205] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.max_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.943362] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.min_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.943529] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.password = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.943689] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.project_domain_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.943857] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.project_domain_name = Default {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.944034] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.project_id = None {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.944215] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.project_name = service {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.944383] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.region_name = RegionOne {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.944543] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.service_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.944711] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.service_type = placement {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.944877] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.945044] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.status_code_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.945213] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.status_code_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.945371] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.system_scope = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.945527] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.945686] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.trust_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.945846] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.user_domain_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.946022] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.user_domain_name = Default {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.946180] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.user_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.946351] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.username = placement {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
779.946530] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.946689] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] placement.version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.946865] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.cores = 20 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.947040] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.count_usage_from_placement = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.947214] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.947382] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.injected_file_content_bytes = 10240 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.947546] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.injected_file_path_length = 255 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.947737] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.injected_files = 5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.947915] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.instances = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.948092] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.key_pairs = 100 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.948260] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.metadata_items = 128 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.948424] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.ram = 51200 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.948585] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.recheck_quota = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.948757] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] quota.server_group_members = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.948968] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None 
None] quota.server_groups = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.949172] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rdp.enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.949496] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] rdp.html5_proxy_base_url = http://127.0.0.1:6083/ {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.949752] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.949935] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.950115] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.image_metadata_prefilter = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.950278] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.950442] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.max_attempts = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.950602] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.max_placement_results = 1000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.950764] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.950929] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.951100] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.951275] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] scheduler.workers = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.951447] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
779.951616] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.951794] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.951965] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.952143] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.952307] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.952471] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.952658] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.952827] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.host_subset_size = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.952990] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.953167] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.953329] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.953493] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.isolated_hosts = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.953656] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.isolated_images = [] 
{{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.953821] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.953980] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.954158] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.954322] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.pci_in_placement = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.954483] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.954645] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.954812] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.954973] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.955150] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.955314] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.955475] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.track_instance_changes = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.955654] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.955823] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] metrics.required = True {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.955984] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] metrics.weight_multiplier = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.956160] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.956322] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] metrics.weight_setting = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.956625] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.956798] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] serial_console.enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.956975] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] serial_console.port_range = 10000:20000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.957161] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.957325] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.957488] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] serial_console.serialproxy_port = 6083 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.957681] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.957851] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.auth_type = password {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.958026] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.958191] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.958355] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.collect_timing = False {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.958515] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.958691] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.958891] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.send_service_user_token = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.959069] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.959233] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] service_user.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.959401] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.agent_enabled = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.959561] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.959849] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.960080] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.960271] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.html5proxy_port = 6082 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.960437] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.image_compression = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.960598] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.jpeg_compression = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.960756] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.playback_compression = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.960931] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.server_listen = 127.0.0.1 {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.961114] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.961280] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.streaming_mode = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.961441] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] spice.zlib_compression = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.961606] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] upgrade_levels.baseapi = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.961765] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] upgrade_levels.cert = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.961938] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] upgrade_levels.compute = auto {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.962140] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] upgrade_levels.conductor = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.962322] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] upgrade_levels.scheduler = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.962494] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.962658] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.962871] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.963074] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.963250] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.963415] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.insecure = False {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.963575] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.963739] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.963902] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vendordata_dynamic_auth.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.964085] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.api_retry_count = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.964251] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.ca_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.964421] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.964588] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.cluster_name = testcl1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.964752] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.connection_pool_size = 10 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.964913] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.console_delay_seconds = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.965093] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.datastore_regex = ^datastore.* {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.965305] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.965475] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.host_password = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.965637] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.host_port = 443 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.965802] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.host_username = administrator@vsphere.local {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.965971] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.insecure = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.966147] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.integration_bridge = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.966310] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.maximum_objects = 100 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.966465] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.pbm_default_policy = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.966622] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.pbm_enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.966778] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.pbm_wsdl_location = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.967110] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.967393] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.serial_port_proxy_uri = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.967597] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.serial_port_service_uri = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.967804] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.task_poll_interval = 0.5 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.968010] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.use_linked_clone = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.968215] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.vnc_keymap = en-us {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.968387] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.vnc_port = 5900 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.968549] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vmware.vnc_port_total = 10000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.968763] 
env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.auth_schemes = ['none'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.968946] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.969261] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.969445] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.969617] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.novncproxy_port = 6080 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.969795] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.server_listen = 127.0.0.1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.969970] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.970147] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.vencrypt_ca_certs = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.970310] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.vencrypt_client_cert = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.970467] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vnc.vencrypt_client_key = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.970640] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.970805] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.disable_deep_image_inspection = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.970967] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.971141] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
779.971304] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.971464] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.disable_rootwrap = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.971624] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.enable_numa_live_migration = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.971783] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.971942] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.972112] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.972273] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.libvirt_disable_apic = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.972431] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.972590] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.972748] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.972906] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.973073] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.973236] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.973393] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.973549] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.973708] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.973870] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.974065] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.974238] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.client_socket_timeout = 900 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.974404] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.default_pool_size = 1000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.974569] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.keep_alive = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.974735] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.max_header_line = 16384 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.974899] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.975071] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.ssl_ca_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.975234] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.ssl_cert_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.975396] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.ssl_key_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.975562] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] wsgi.tcp_keepidle = 600 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.975735] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.975904] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] zvm.ca_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.976076] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] zvm.cloud_connector_url = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.976376] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.976549] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] zvm.reachable_timeout = 300 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.976729] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.enforce_new_defaults = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.976900] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.enforce_scope = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.977095] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.policy_default_rule = default {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.977284] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.977457] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.policy_file = policy.yaml {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.977627] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.977826] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.977997] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.978173] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.978337] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.978505] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.978701] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.978909] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.connection_string = messaging:// {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.979093] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.enabled = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.979267] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.es_doc_type = notification {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.979429] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.es_scroll_size = 10000 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.979596] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.es_scroll_time = 2m {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.979758] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.filter_error_trace = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.979927] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.hmac_keys = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.980105] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.sentinel_service_name = mymaster {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.980270] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.socket_timeout = 0.1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.980432] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.trace_requests = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.980591] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler.trace_sqlalchemy = False {{(pid=61964) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.980775] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler_jaeger.process_tags = {} {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.980937] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler_jaeger.service_name_prefix = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.981112] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] profiler_otlp.service_name_prefix = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.981281] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] remote_debug.host = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.981440] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] remote_debug.port = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.981618] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.981779] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.981941] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.982117] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.982274] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.982432] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.982589] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.982748] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.heartbeat_rate = 2 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.982908] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] 
oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.983076] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.983248] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.983413] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.983578] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.983741] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.983903] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.984086] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.984250] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.984409] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.984571] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.984731] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.984891] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.985062] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 
{{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.985225] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.985383] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.985547] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.985712] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.ssl = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.985881] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.986060] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.986222] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.986386] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.986552] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.986737] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.986902] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_notifications.retry = -1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.987098] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.987275] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.987446] env[61964]: DEBUG oslo_service.service 
[None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.auth_section = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.987605] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.auth_type = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.987792] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.cafile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.987968] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.certfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.988144] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.collect_timing = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.988303] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.connect_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.988458] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.connect_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.988613] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.endpoint_id = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.988813] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.endpoint_override = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.988988] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.insecure = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.989161] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.keyfile = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.989318] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.max_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.989475] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.min_version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.989630] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.region_name = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.989784] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.service_name = None {{(pid=61964) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.989937] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.service_type = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.990108] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.split_loggers = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.990264] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.status_code_retries = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.990421] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.status_code_retry_delay = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.990576] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.timeout = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.990730] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.valid_interfaces = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.990891] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_limit.version = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.991104] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_reports.file_event_handler = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.991274] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.991461] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] oslo_reports.log_dir = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.991637] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.991797] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.991956] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.992136] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.992299] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.992457] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.992624] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.992783] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_ovs_privileged.group = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.992941] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.993126] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.993279] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.993436] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] vif_plug_ovs_privileged.user = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.993602] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.993779] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.993952] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.994140] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.994316] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.994486] 
env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.994652] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.994875] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.994988] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.995173] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_ovs.isolate_vif = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.995340] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.995504] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.995675] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.995839] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.995995] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_vif_ovs.per_port_bridge = False {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.996176] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_brick.lock_path = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.996338] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.996498] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.996664] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] privsep_osbrick.capabilities = [21] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.996821] 
env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] privsep_osbrick.group = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.996974] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] privsep_osbrick.helper_command = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.997149] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.997310] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.997465] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] privsep_osbrick.user = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.997636] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.997823] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] nova_sys_admin.group = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.997984] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] nova_sys_admin.helper_command = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.998162] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.998320] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.998475] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] nova_sys_admin.user = None {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 779.998605] env[61964]: DEBUG oslo_service.service [None req-eb1e6aba-9c0c-45ce-b020-b858b7003417 None None] ******************************************************************************** {{(pid=61964) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 779.999072] env[61964]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 780.009560] env[61964]: WARNING nova.virt.vmwareapi.driver [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. 
If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 780.010011] env[61964]: INFO nova.virt.node [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Generated node identity c64b88bc-0cc7-41f7-af90-1e96b384d8a5 [ 780.010232] env[61964]: INFO nova.virt.node [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Wrote node identity c64b88bc-0cc7-41f7-af90-1e96b384d8a5 to /opt/stack/data/n-cpu-1/compute_id [ 780.023527] env[61964]: WARNING nova.compute.manager [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Compute nodes ['c64b88bc-0cc7-41f7-af90-1e96b384d8a5'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 780.062126] env[61964]: INFO nova.compute.manager [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 780.084155] env[61964]: WARNING nova.compute.manager [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 780.084431] env[61964]: DEBUG oslo_concurrency.lockutils [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 780.084642] env[61964]: DEBUG oslo_concurrency.lockutils [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 780.084785] env[61964]: DEBUG oslo_concurrency.lockutils [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 780.084943] env[61964]: DEBUG nova.compute.resource_tracker [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 780.086094] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cfbc50-8f51-47e7-9d54-344063a069e6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.094958] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0270d8-977a-4133-ac56-025167d2d737 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.108816] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93f8b52-213a-4c0d-a25f-9118f5bf282f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.115018] env[61964]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0370eb49-31ca-4765-a4fb-f3f4adbe5bce {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.144771] env[61964]: DEBUG nova.compute.resource_tracker [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181363MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 780.144939] env[61964]: DEBUG oslo_concurrency.lockutils [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 780.145131] env[61964]: DEBUG oslo_concurrency.lockutils [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 780.157135] env[61964]: WARNING nova.compute.resource_tracker [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] No compute node record for cpu-1:c64b88bc-0cc7-41f7-af90-1e96b384d8a5: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c64b88bc-0cc7-41f7-af90-1e96b384d8a5 could not be found. [ 780.172275] env[61964]: INFO nova.compute.resource_tracker [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 [ 780.231413] env[61964]: DEBUG nova.compute.resource_tracker [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 780.231630] env[61964]: DEBUG nova.compute.resource_tracker [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 780.337041] env[61964]: INFO nova.scheduler.client.report [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] [req-bac6c174-cca1-4d69-be4e-75a1b2d994ed] Created resource provider record via placement API for resource provider with UUID c64b88bc-0cc7-41f7-af90-1e96b384d8a5 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
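The inventory that the resource tracker pushes to Placement in the entries that follow always has the same per-resource-class shape (VCPU, MEMORY_MB, DISK_GB, each with total/reserved/min_unit/max_unit/step_size/allocation_ratio). Below is a minimal illustrative Python sketch of that payload using the values from this log; the helper names build_inventory and usable_capacity are hypothetical, and the capacity rule (total - reserved) * allocation_ratio is stated as an assumption about how Placement sizes the provider, not something taken from this log.

def build_inventory(total_vcpus=48, ram_mb=196590, disk_gb=400):
    # Same structure as the dict logged by set_inventory_for_provider in this log.
    return {
        'VCPU':      {'total': total_vcpus, 'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': ram_mb,      'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': disk_gb,     'reserved': 0,   'min_unit': 1,
                      'max_unit': 155,   'step_size': 1, 'allocation_ratio': 1.0},
    }

def usable_capacity(inv):
    # Assumed Placement sizing rule: (total - reserved) * allocation_ratio.
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in build_inventory().items():
    print(rc, usable_capacity(inv))   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0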
[ 780.354306] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60728379-fe0b-4882-98ff-96115ba3d263 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.361796] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79f3ad5-64de-441d-a41d-168381e6387e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.391054] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19aaccd6-32e3-45cc-962a-0f020c037c48 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.397791] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c7e92d-fa4e-4b8c-b934-6ccc1d3fe209 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.410538] env[61964]: DEBUG nova.compute.provider_tree [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 780.450496] env[61964]: DEBUG nova.scheduler.client.report [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Updated inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 780.450743] env[61964]: DEBUG nova.compute.provider_tree [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Updating resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 generation from 0 to 1 during operation: update_inventory {{(pid=61964) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 780.450893] env[61964]: DEBUG nova.compute.provider_tree [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 780.504050] env[61964]: DEBUG nova.compute.provider_tree [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Updating 
resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 generation from 1 to 2 during operation: update_traits {{(pid=61964) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 780.521797] env[61964]: DEBUG nova.compute.resource_tracker [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 780.522012] env[61964]: DEBUG oslo_concurrency.lockutils [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.377s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 780.522181] env[61964]: DEBUG nova.service [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Creating RPC server for service compute {{(pid=61964) start /opt/stack/nova/nova/service.py:182}} [ 780.536948] env[61964]: DEBUG nova.service [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] Join ServiceGroup membership for this service compute {{(pid=61964) start /opt/stack/nova/nova/service.py:199}} [ 780.537168] env[61964]: DEBUG nova.servicegroup.drivers.db [None req-816aa8f9-69b5-4d10-9b57-7c4161529415 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61964) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 789.835337] env[61964]: DEBUG dbcounter [-] [61964] Writing DB stats nova_cell1:SELECT=1 {{(pid=61964) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 789.839083] env[61964]: DEBUG dbcounter [-] [61964] Writing DB stats nova_cell0:SELECT=1 {{(pid=61964) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 802.541510] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.552288] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Getting list of instances from cluster (obj){ [ 802.552288] env[61964]: value = "domain-c8" [ 802.552288] env[61964]: _type = "ClusterComputeResource" [ 802.552288] env[61964]: } {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 802.553587] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96234ef-d63a-4e26-9f67-83be64cb45b3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.562639] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Got total of 0 instances {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 802.562858] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.563166] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Getting list of instances from cluster (obj){ [ 802.563166] 
env[61964]: value = "domain-c8" [ 802.563166] env[61964]: _type = "ClusterComputeResource" [ 802.563166] env[61964]: } {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 802.563977] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2feb29-36bf-4db3-9f3f-fbc12e3722e7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.571081] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Got total of 0 instances {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 836.392497] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.392900] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.393018] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 836.394169] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 836.403422] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 836.403618] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.403838] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.404042] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.404769] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.405010] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.405215] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.405395] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 836.405555] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.416011] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 836.416254] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 836.416426] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 836.416570] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 836.417622] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea70219-b41e-4073-b122-61f33289fa9e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.426347] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c59b87-fb95-4eb5-ba96-acc2d2f616e4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.441193] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8f0714-0b6f-432a-8286-74b499d150a1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.447521] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaf2c69-27a3-4193-8e11-d946560bb82e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.475447] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181352MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 836.475581] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 836.475746] 
env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 836.504861] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 836.505036] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 836.518912] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb88eddf-f3c2-45c8-8b28-a2772584f0be {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.526198] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd74ada9-3dfe-45e1-a8ca-7e3236e82a5f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.556218] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec8a92b-8c89-4af1-aa3a-0473f6b68431 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.563374] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63626845-42a6-4c7f-859f-61398e52066a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.575995] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.583835] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 836.584929] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 836.585108] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.109s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 896.572022] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.572438] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.582505] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.582643] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 896.582756] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 896.591289] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 896.591481] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.384283] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.384569] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.384661] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.383845] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.384171] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.384236] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 898.384386] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.396017] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 898.396017] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 898.396017] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 898.396017] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 898.396017] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539ac9bc-e0cc-45c5-bb52-ce2b29c81fc5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.405964] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cb62d4-3f90-4215-95d8-eb0d1291af39 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.420088] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e938f7bd-27c6-424f-87da-58e7d43d1f94 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.426445] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80321ae3-7390-4293-ad2d-1420e92fabb4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.455486] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181336MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 898.455650] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 898.455823] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 898.486475] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 898.486651] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 898.500618] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417ab9b3-9aa1-4ab9-bbff-374b8f7156d3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.508632] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c5fc22-cbf1-49fa-b163-6de2595b1ce1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.537779] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3de7c5-fc2e-47e0-b612-91e72896f998 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.545162] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7842a6bb-98a8-4e3f-b705-8a8c145f4579 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.558283] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.566813] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 898.567907] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 898.568095] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.112s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 956.568484] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.385768] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.385768] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 957.385768] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 957.395313] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 957.395499] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.395651] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.389717] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.384318] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.384590] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.384691] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.394525] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 959.394857] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 959.394935] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 959.395036] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 959.396164] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea662be-1df3-4576-a98b-5f3a3d5a0f95 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.404665] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cea006d-2480-4b4d-a9b4-b5e94ee9f5b7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.417966] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd19eca-b018-4869-9280-3e97892c95d8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.423869] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a202b8-8e6d-4260-8123-f530529e793f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.451307] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181338MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 959.451446] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 959.451647] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 959.480588] env[61964]: DEBUG nova.compute.resource_tracker [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 959.480747] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 959.493385] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ab3fe0-b03b-455d-ba82-24b93d7f3a09 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.501296] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff29f12a-f726-440b-9059-1db9e3aeb356 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.529622] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032079ba-1b99-4507-a1ca-4ea6f2e8656d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.536058] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5be85d2-ee1b-4833-810c-cd0ec1bbe727 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.549277] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.557434] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 959.558574] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 959.558738] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.107s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 960.557686] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.558057] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1017.385091] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.385486] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1017.385486] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1017.394422] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1017.394645] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.394777] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.384370] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.384802] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.384802] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.379623] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.383279] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.383279] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None 
None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1021.379702] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.391137] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.401761] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1021.401990] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1021.402167] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1021.402321] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1021.403508] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d0a0bd-24b8-41ad-a9a1-e906a3ae25f8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.413195] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0638a68-f73e-48d0-9b9b-f68e2c22533d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.427231] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22733be-4548-454c-b80a-08ce4d9fcbd1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.433409] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1a6fab-a409-4aaf-ba5b-c960753f7c01 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.461802] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181346MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1021.461990] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1021.462162] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1021.492104] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1021.492293] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1021.506826] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291c16d0-828c-40a7-853c-cd4f73873d6f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.514608] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb62cb7-6b88-48d5-9ad6-76c4d43030c5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.544492] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238319a0-e445-4511-9191-2352303e31a5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.551619] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9389234e-238b-42f1-a2b0-26c1f85c76d8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.564595] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.573386] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1021.574558] env[61964]: DEBUG nova.compute.resource_tracker [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1021.574731] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.113s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1076.384967] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.385288] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1076.397960] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] There are 0 instances to clean {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1076.398178] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.398338] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61964) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1076.406428] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.419891] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.383829] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.384237] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1079.384237] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1079.392902] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
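The steady cadence of records above (_heal_instance_info_cache, _poll_*, _reclaim_queued_deletes, _run_pending_deletes, update_available_resource) all comes from one mechanism: ComputeManager methods registered as oslo.service periodic tasks, fired by run_periodic_tasks on its timer. The following is a minimal, self-contained sketch of that pattern, assuming oslo.service and oslo.config are installed; the class name, spacings, and task bodies are placeholders, not Nova's ComputeManager.

# Sketch of the oslo.service periodic-task pattern behind the
# "Running periodic task ComputeManager.<name>" lines above.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class DemoManager(periodic_task.PeriodicTasks):
    """Methods decorated with @periodic_task are collected automatically."""

    def __init__(self):
        # PeriodicTasks takes the config object so task spacing can be tuned.
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        # Nova's version refreshes the network info cache for instances on
        # this host; with no instances there is nothing to heal.
        pass

    @periodic_task.periodic_task(spacing=300)
    def _run_pending_deletes(self, context):
        # Placeholder for "Cleaning up deleted instances".
        pass


manager = DemoManager()
# The service's timer loop calls this repeatedly; each registered task runs
# once its spacing has elapsed.
manager.run_periodic_tasks(context=None)

With no instances on this host, each pass reduces to the "Didn't find any instances for network info cache update." and "CONF.reclaim_instance_interval <= 0, skipping..." outcomes recorded in the log.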
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1079.393116] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.393278] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.393436] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.384637] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.385013] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1081.380404] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1081.383103] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.385698] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.396123] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1083.396333] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1083.396502] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1083.396654] env[61964]: DEBUG nova.compute.resource_tracker [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1083.397709] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d8efab-ae24-479d-b93b-2f92024009bc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.406578] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4af549-360a-4cf7-81a8-01acd26fba43 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.420680] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3abd01d-3572-48f6-b3d7-9280331bd0e4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.426648] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af97e071-981f-4933-b9a3-1fb7a0c2aaf5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.454487] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181337MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1083.454622] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1083.454800] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1083.523909] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1083.524104] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1083.539693] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1083.552849] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating ProviderTree inventory for provider 
c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1083.553046] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1083.564782] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1083.580445] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1083.592111] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9d39cc-e8d2-4c38-9abb-5527cdb0a163 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.599481] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ef0851-3c63-4bd0-a229-ca880d538ae5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.629135] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2ccbb7-ac68-48bb-9669-7682025c443d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.636244] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67dd720e-c75f-4dc5-84a9-9e16c3595cb2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.648980] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.657024] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1083.658131] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1083.658309] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.203s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1138.659051] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.384287] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.384543] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1139.384621] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1139.393598] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
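The "Acquiring lock \"compute_resources\"" / "acquired ... waited" / "\"released\" ... held" triplets wrapped around each resource audit are emitted by the debug logging inside oslo.concurrency's synchronized wrapper (the inner frames in lockutils.py cited in the records). A rough sketch of that locking pattern follows, with illustrative names rather than Nova's ResourceTracker, assuming oslo.concurrency is installed.

# Sketch of the oslo.concurrency lock usage that produces the
# acquire/wait/held debug lines around the resource audits.
from oslo_concurrency import lockutils


class DemoResourceTracker:
    @lockutils.synchronized('compute_resources')
    def update_available_resource(self):
        # Everything inside the decorated method runs while holding the
        # in-process "compute_resources" lock, so the periodic audit and
        # concurrent instance claims cannot interleave.
        return "audited"


tracker = DemoResourceTracker()
print(tracker.update_available_resource())

# The same named lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # critical section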
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1139.393825] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.384888] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.385333] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.385333] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.385504] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1142.380654] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.379489] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.388925] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.389170] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.398040] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1143.398248] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1143.398411] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1143.398560] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1143.399618] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1385be4c-e90a-4f96-9f2f-113107589de5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.408375] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c414d5-4c16-42e0-a847-d43c64b15a71 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.421816] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095d7283-151f-4bb1-aa87-d1a586e6cd40 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.427707] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff9b6e9-0b57-411a-b3dd-38074ab7e75c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.456747] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181322MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1143.456916] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1143.457087] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1143.486593] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1143.486761] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1143.500030] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a153bd-8046-4db4-8fa3-9bc44ff34853 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.506467] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fca985-f802-43f6-896c-dd79739fc6aa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.534805] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7131115-56f8-4ef3-b786-75b46e02cda9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.541485] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184b5684-8731-4993-be45-ea378f75d8b4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.554181] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.561936] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1143.563139] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1143.563313] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.106s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1198.560496] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.384481] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.384873] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1200.384873] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9915}} [ 1200.393834] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1200.394057] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.384250] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.384492] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.384640] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1202.380264] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.383567] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.384867] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.384078] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.395797] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1205.396156] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1205.396194] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: 
held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1205.396315] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1205.397428] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e0cf04-ea68-4276-b86e-20046fa86a6f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.406631] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab8d340-4787-442c-8b72-4be6f6ee8ce1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.420311] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ae43e5-42f4-4b40-9f64-b98bc48d387c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.426716] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d5eba3-3a5e-406d-bc2b-03bde2bf81b4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.455660] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181339MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1205.455830] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1205.455994] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1205.488358] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1205.488536] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1205.505529] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d96c17-594e-472b-9bec-45c9ad3496d0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.514245] 
env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d67a362-f05a-4ba6-bd30-17ed676ae7a4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.544177] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55f351b-27c1-43b2-a419-ca63303e5dfe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.551152] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa45fd89-3cb6-4702-b5e8-3ad8e35f9a03 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.563604] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.571770] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1205.572910] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1205.573116] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.117s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1258.575184] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.384461] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.384893] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1261.384893] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1261.394487] env[61964]: DEBUG nova.compute.manager 
[None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1261.394690] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.394857] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.384061] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.384061] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.384413] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1264.384626] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.380397] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.391441] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.384496] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.396338] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1267.396585] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1267.396770] env[61964]: 
DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1267.396931] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1267.398147] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e6a668-fa20-4021-bfbb-4a4b27b44473 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.408074] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b9fb25-fe69-4546-bbf5-962528a9e38e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.423367] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a723585b-3fd1-450c-97f4-8b0b1f7c1db9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.430613] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a350fcc2-85c5-4e94-bb8d-f8fa7a95144b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.460450] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181335MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1267.460658] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1267.460813] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1267.494387] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1267.494615] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1267.509574] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f38519-52f4-4d25-b4a6-30e67f66e4e1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.517653] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b9a503-231c-4ffd-837e-e95a12d19e25 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.548720] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf11305d-ddab-412a-866a-59de21f7cee6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.556939] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4770a45d-2e8d-4d71-9a83-57a43079d394 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.571018] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1267.579993] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1267.581175] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1267.581373] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.121s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1320.581675] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.385343] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.385875] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1322.385875] env[61964]: DEBUG nova.compute.manager [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1322.394664] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1323.384320] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.384606] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.380677] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.383349] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.383505] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1325.383699] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.384099] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.384935] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.395888] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1327.396114] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1327.396288] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1327.396444] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1327.397615] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a035ff-d10c-44a1-afe0-4ef105487818 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.407369] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9944bbc5-76f2-4600-a345-7713c0876444 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.423777] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462acd52-18fd-48c3-b46e-059097ad5ce8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.431186] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286516a1-ae09-4759-b97a-09798c81c436 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.461350] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181327MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1327.461544] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1327.461715] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1327.494988] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1327.495177] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1327.510494] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844042f4-4420-4c2d-a132-d19c21d49faa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.518518] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e778ea-e36a-47f1-be53-c474772624ee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.547837] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7553b96-3682-40cd-b948-193afa68b3ca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.555816] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78b302b-0707-41f8-bf70-fbc252546ee2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.570479] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.579405] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1327.580613] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1327.580794] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.119s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1380.579640] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.386072] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.386072] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1382.386072] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1382.394323] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
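The compute_resources lock lines above ("Acquiring lock ... acquired ... waited 0.000s ... released ... held 0.119s") come from a wrapper that times how long each caller waited for and then held a named lock. Below is a stdlib-only sketch of that wait/held timing pattern, assuming one threading.Lock per name; the real oslo.concurrency lockutils also supports fair and external (file-based) locks, which this sketch omits.

# Stdlib-only sketch of wait/held timing around a named lock, in the spirit of
# the lockutils log lines above (not the real oslo.concurrency implementation).
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)


@contextmanager
def timed_lock(name: str, caller: str):
    lock = _locks[name]
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held '
              f'{time.monotonic() - acquired:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker._update_available_resource"):
        time.sleep(0.1)  # stand-in for the resource audit work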
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1383.384547] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.384704] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61964) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1385.385620] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.385988] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.385988] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.386143] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.386304] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.386273] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.386679] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.397479] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.397479] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1387.384696] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.384933] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1387.397698] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] There are 0 instances to clean {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1388.397546] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.408459] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1388.408939] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1388.409031] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1388.409239] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1388.410649] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ea3987-1d45-41ea-95d3-69d7ad85385d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.420389] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7d807a-49f0-434a-aaff-5135e014b592 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.435515] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc625a5c-1e22-4124-a04c-4ff14869396e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.442439] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9310d982-bdd4-482b-b346-8ac3ad9dfec0 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.471918] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181311MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1388.472092] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1388.472296] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1388.572820] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1388.573064] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1388.589024] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1388.602509] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1388.602689] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1388.612916] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None 
None] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1388.628093] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1388.639485] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998ebb74-959a-4c0a-b31d-444ea44cbfe3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.646949] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3971501-e964-4e23-968b-a3bb124ffe7d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.675802] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fabe2c-3801-4640-b47d-c4c6e1eb326e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.682502] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2197683d-3c58-4077-83fb-d5e78d6b488d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.695903] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.704738] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1388.705842] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1388.706014] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.234s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1402.853242] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.861789] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Getting list of instances from cluster (obj){ [ 1402.861789] env[61964]: value = "domain-c8" [ 1402.861789] env[61964]: _type = "ClusterComputeResource" [ 1402.861789] env[61964]: } {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1402.862808] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970ecad5-9f80-48dd-b4e6-4659f4f5de44 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.871812] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Got total of 0 instances {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1425.798134] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquiring lock "f99b3d5c-9f51-4815-9507-6522e57e715f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1425.799457] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Lock "f99b3d5c-9f51-4815-9507-6522e57e715f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1425.825556] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Starting instance... 
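Here _sync_power_states asks the vmwareapi driver for the VMs in cluster domain-c8 (zero at this point) and reconciles them with Nova's record of each instance's power state. A library-free sketch of that reconciliation follows; the dictionaries and helper name are hypothetical, and the real compute manager also weighs task states before acting on a mismatch.

# Simplified power-state sync: compare the driver's view of each instance with
# the database record and flag mismatches (hypothetical helper, stdlib only).
RUNNING, SHUTDOWN, NOSTATE = "running", "shutdown", "nostate"


def sync_power_states(db_instances: dict, hypervisor_instances: dict) -> list:
    """Return (uuid, db_state, vm_state) tuples that need correction."""
    out_of_sync = []
    for uuid, db_state in db_instances.items():
        vm_state = hypervisor_instances.get(uuid, NOSTATE)
        if vm_state != db_state:
            out_of_sync.append((uuid, db_state, vm_state))
    return out_of_sync


if __name__ == "__main__":
    # "Got total of 0 instances" from the cluster, nothing in the DB either.
    print(sync_power_states({}, {}))                     # []
    print(sync_power_states({"f99b3d5c": RUNNING}, {}))  # [('f99b3d5c', 'running', 'nostate')]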
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1425.963536] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1425.963793] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1425.966554] env[61964]: INFO nova.compute.claims [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1426.161759] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0581c1-f1ec-40a5-a139-f0b28b5b93f0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.178021] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc8ef7f-5f36-4a1f-b77e-c7fa2fe6fe42 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.219268] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd80ba33-e45d-4bb2-9ccc-06a945c104ca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.228552] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b48c14-7109-4428-9ad0-95ae76129022 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.246691] env[61964]: DEBUG nova.compute.provider_tree [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.264101] env[61964]: DEBUG nova.scheduler.client.report [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1426.296009] 
env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.332s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1426.296729] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1426.364321] env[61964]: DEBUG nova.compute.utils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1426.366702] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1426.366963] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1426.391009] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1426.506229] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Start spawning the instance on the hypervisor. 
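The claim for instance f99b3d5c-... succeeds because the m1.nano flavor (1 vCPU, 128 MB RAM, 1 GB root disk) fits the capacity implied by the reported inventory: VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400 with max_unit 155. Below is a minimal capacity-check sketch using those figures; the actual ResourceTracker and Placement logic also accounts for existing allocations and further constraints.

# Capacity-check sketch using the inventory figures reported in this log.
def fits(inventory: dict, requested: dict, used: dict) -> bool:
    """True if `requested` fits into `inventory` on top of `used`."""
    for rc, amount in requested.items():
        inv = inventory[rc]
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        if used.get(rc, 0) + amount > capacity or amount > inv["max_unit"]:
            return False
    return True


INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 155, "allocation_ratio": 1.0},
}

# m1.nano: 1 vCPU, 128 MB RAM, 1 GB root disk -- easily fits an empty node.
print(fits(INVENTORY, {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}, {}))  # True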
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1427.256711] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1427.256711] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1427.256711] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1427.257204] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1427.257204] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1427.257204] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1427.257309] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1427.257463] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1427.257523] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1427.257697] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1427.257884] env[61964]: DEBUG nova.virt.hardware [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1427.260150] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4265e81-f91d-442b-b517-d839b6012038 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.271730] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0d1185-d8c7-4cb8-b9e7-e21f7c98d8b9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.300088] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04c99f0-fb23-495a-bcd3-09fbbb6469fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.627594] env[61964]: DEBUG nova.policy [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6248ef6875d14682acc3150785f8df03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b23d36132b844358e4741781b8c8420', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1428.475091] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Successfully created port: b3b71fa4-f742-4837-8b4f-3880c2617115 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1428.853289] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquiring lock "408cfd06-df36-46a4-9a6c-86dc91339712" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1428.853711] 
env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Lock "408cfd06-df36-46a4-9a6c-86dc91339712" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1428.879035] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1428.960246] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1428.960508] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1428.961981] env[61964]: INFO nova.compute.claims [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1429.102094] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89de6145-e7be-46b4-bf21-ae61d0c676ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.112552] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc006fc1-3727-4db2-bb2e-968140177831 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.146365] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32cd9ffb-5dc1-4b96-9ab3-555a87c48733 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.154305] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869155d0-a0ef-474f-b2fd-95e3d4591bf0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.168320] env[61964]: DEBUG nova.compute.provider_tree [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1429.180870] env[61964]: DEBUG nova.scheduler.client.report [None req-4b94ea60-975d-467e-89d5-91fdf336f531 
tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1429.203951] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.242s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1429.203951] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1429.292546] env[61964]: DEBUG nova.compute.utils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1429.297104] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Not allocating networking since 'none' was specified. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 1429.317701] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1429.401325] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Start spawning the instance on the hypervisor. 
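During each claim and audit, the scheduler report client compares its local inventory view against what Placement already holds for the provider and logs "Inventory has not changed" instead of pushing an update. A small compare-before-update sketch follows; the cache dict here stands in for the HTTP round trip the real report client performs.

# Compare-before-update sketch for provider inventory (stdlib only).
def set_inventory_for_provider(provider_id: str, local: dict, cached: dict) -> bool:
    """Push inventory only when it differs from the cached Placement view."""
    if cached.get(provider_id) == local:
        print(f"Inventory has not changed for provider {provider_id}")
        return False
    cached[provider_id] = dict(local)   # stand-in for the PUT to Placement
    print(f"Updated inventory for provider {provider_id}")
    return True


cache = {}
inv = {"VCPU": {"total": 48, "allocation_ratio": 4.0},
       "MEMORY_MB": {"total": 196590, "reserved": 512}}
set_inventory_for_provider("c64b88bc-0cc7-41f7-af90-1e96b384d8a5", inv, cache)  # updates
set_inventory_for_provider("c64b88bc-0cc7-41f7-af90-1e96b384d8a5", inv, cache)  # no change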
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1429.437649] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1429.437907] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1429.438076] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1429.438265] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1429.438412] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1429.438560] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1429.438770] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1429.438925] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1429.439108] env[61964]: DEBUG nova.virt.hardware [None 
req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1429.439290] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1429.439442] env[61964]: DEBUG nova.virt.hardware [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1429.444093] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d8bfe8-d7cf-4c0e-a831-7dd4aa1933cc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.451728] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a34eb5-9c78-4b61-92d8-8c3ecab0679a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.011861] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Instance VIF info [] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1430.020825] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1430.021230] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c506f081-4ef7-40c4-834f-09d178a12b2b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.034116] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Created folder: OpenStack in parent group-v4. [ 1430.034797] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Creating folder: Project (6bc43e0d6c5d454b8fca522684a1aa0d). Parent ref: group-v351942. 
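Before creating the VM, the driver builds a three-level folder hierarchy in vCenter: OpenStack, then a per-project folder, then Instances, each created under the previous parent ref, with "already exists" treated as success. The sketch below shows that idempotent nested-path creation using an in-memory dict in place of vCenter folders; the real code issues Folder.CreateFolder calls through the vSphere API.

# Idempotent nested-folder creation sketch (in-memory stand-in for vCenter).
def ensure_folder_path(tree: dict, path: list) -> dict:
    """Walk `path` under `tree`, creating each missing level; return the leaf."""
    node = tree
    for name in path:
        if name not in node:
            print(f"Created folder: {name}")
            node[name] = {}
        node = node[name]
    return node


root: dict = {}
project = "Project (6bc43e0d6c5d454b8fca522684a1aa0d)"
ensure_folder_path(root, ["OpenStack", project, "Instances"])
ensure_folder_path(root, ["OpenStack", project, "Instances"])  # second call creates nothing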
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1430.034797] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-265ba41c-defc-4e28-accc-956cb7b028d4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.048965] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Created folder: Project (6bc43e0d6c5d454b8fca522684a1aa0d) in parent group-v351942. [ 1430.049219] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Creating folder: Instances. Parent ref: group-v351943. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1430.049416] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bed0ea91-1409-43a0-a4bc-25213c3a8ed9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.058580] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Created folder: Instances in parent group-v351943. [ 1430.058864] env[61964]: DEBUG oslo.service.loopingcall [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1430.059290] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1430.059290] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c521d45-890c-4a39-acb6-31dd01d24091 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.078148] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1430.078148] env[61964]: value = "task-1688556" [ 1430.078148] env[61964]: _type = "Task" [ 1430.078148] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.086945] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688556, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.593285] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688556, 'name': CreateVM_Task, 'duration_secs': 0.387437} completed successfully. 
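CreateVM_Task is submitted and then polled until the task reports completion ("progress is 0%" ... "completed successfully" with duration_secs). Below is a generic stdlib-only polling sketch under the assumption of a poll() callable returning (state, progress); the real oslo.vmware wait loop additionally surfaces error details and supports cancellation.

# Generic wait-for-task polling sketch (not the oslo.vmware implementation).
import time


def wait_for_task(poll, interval: float = 0.5, timeout: float = 300.0):
    """Poll `poll()` -> (state, progress) until success, error, or timeout."""
    start = time.monotonic()
    while time.monotonic() - start < timeout:
        state, progress = poll()
        if state == "success":
            return time.monotonic() - start           # duration_secs
        if state == "error":
            raise RuntimeError("task failed")
        print(f"progress is {progress}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")


if __name__ == "__main__":
    states = iter([("running", 0), ("running", 50), ("success", 100)])
    print(f"completed in {wait_for_task(lambda: next(states), interval=0.01):.3f}s")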
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.594562] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1430.594562] env[61964]: DEBUG oslo_vmware.service [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294e9f76-23f8-4257-b310-1061f546512e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.600842] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1430.600999] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1430.601662] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1430.601916] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d3961b-bcbe-4444-ad48-0bcdf842e440 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.610743] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "36056842-1c0b-4f4c-a512-e250fc657620" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1430.610961] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "36056842-1c0b-4f4c-a512-e250fc657620" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1430.612180] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Waiting for the task: (returnval){ [ 1430.612180] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527512bd-885b-333a-27e0-e3defea95fab" [ 1430.612180] env[61964]: _type = 
"Task" [ 1430.612180] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.620863] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527512bd-885b-333a-27e0-e3defea95fab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.627573] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1430.695890] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1430.696157] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1430.697818] env[61964]: INFO nova.compute.claims [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1430.820577] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7a5906-a179-406f-8e31-c54bd7166302 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.828582] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5977ce3c-f8af-4773-a796-88fc30d0382e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.863417] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca31d02-229c-4d2c-bde9-8cbf90fc3add {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.871721] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ac6eb4-f9a5-4f16-972a-549a2606e0d6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.887494] env[61964]: DEBUG nova.compute.provider_tree [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1430.898613] env[61964]: DEBUG nova.scheduler.client.report [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1430.914415] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.218s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1430.914905] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1430.956863] env[61964]: DEBUG nova.compute.utils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1430.958356] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1430.958632] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1430.968927] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1431.076840] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1431.126324] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1431.126664] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1431.127128] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1431.127128] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1431.127914] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1431.127914] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1431.127914] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1431.127914] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
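[editor's note] The nova.virt.hardware DEBUG records above trace the CPU-topology negotiation for the m1.nano flavor: flavor and image limits and preferences are all unset (logged as 0:0:0), so the limits fall back to 65536 sockets/cores/threads and the only split of a single vCPU is 1 socket x 1 core x 1 thread. A minimal sketch of that enumeration follows; it is illustrative only, not the nova.virt.hardware implementation, and the function name and defaults are assumptions:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) splits whose product is exactly vcpus."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- matches the "Got 1 possible topologies" record that follows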
[ 1431.128093] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1431.128408] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1431.128408] env[61964]: DEBUG nova.virt.hardware [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1431.129666] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736fcdcf-77a6-40e5-acf8-dce643849953 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.150905] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1431.151108] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1431.151350] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1431.151487] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1431.151898] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.153603] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dd04610-f38c-4a67-93b7-3926823b1c35 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.162037] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b6e1c2-9ec8-441f-a41a-9118d14c0996 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.182510] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.182934] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1431.183992] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af8b2e5-4abf-43f5-a334-f09822e3362c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.191720] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d362d81-3c9c-466d-abeb-3edcae32c438 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.197394] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Waiting for the task: (returnval){ [ 1431.197394] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5247661a-e79c-bc6c-9416-b90e7b08ab48" [ 1431.197394] env[61964]: _type = "Task" [ 1431.197394] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.201991] env[61964]: DEBUG nova.policy [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7efaeca7b25e4b28b290f327f7f335c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3765bc7b039b4868a96b6ec336cb318a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1431.208382] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5247661a-e79c-bc6c-9416-b90e7b08ab48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.708057] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1431.708606] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Creating directory with path [datastore1] vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.708650] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2a374ca-a74d-4e38-873b-db964f8c4ae4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.745259] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Created directory with path [datastore1] vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.745259] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Fetch image to [datastore1] vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1431.745442] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1431.746292] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d52a80-e09a-4d6e-ad4b-b536eebaeed6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.755606] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef261e7-e8ad-4bbf-a0ae-4e9bddc0b6d0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.767326] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbfb6c0-5c1d-4f90-b499-3c25abda046e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.808815] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246f0672-3b54-41e9-9662-74d32281f2ff {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.816104] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-83164cfc-4db3-42d0-b6ea-a1d0b39d77b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.904044] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1431.935887] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Successfully created port: f72b6dbd-3644-42d8-b4bc-cdce88ebb187 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1431.978739] env[61964]: DEBUG oslo_vmware.rw_handles [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1432.059484] env[61964]: DEBUG oslo_vmware.rw_handles [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1432.059484] env[61964]: DEBUG oslo_vmware.rw_handles [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1432.246029] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquiring lock "41046d8c-861e-4bb4-8f7f-ae7c4d494964" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1432.246029] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Lock "41046d8c-861e-4bb4-8f7f-ae7c4d494964" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1432.257699] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1432.333543] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1432.333804] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1432.335666] env[61964]: INFO nova.compute.claims [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1432.510368] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bec9be-3125-403c-b98c-5a7d22257cbc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.528219] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e38f42-2d51-4dba-adcc-4538ff5feb3d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.569301] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db99f236-b6aa-43f1-a16c-9ab629fa5ea9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.579625] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e15b9d-8193-44e7-8bc2-27ccc2284c0e {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.601302] env[61964]: DEBUG nova.compute.provider_tree [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.615931] env[61964]: DEBUG nova.scheduler.client.report [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1432.633196] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.299s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1432.633488] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1432.679055] env[61964]: DEBUG nova.compute.utils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1432.683238] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1432.683238] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1432.696861] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1432.796411] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1432.827113] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1432.827113] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1432.827113] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1432.827317] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1432.828137] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1432.828137] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1432.828137] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1432.828137] env[61964]: 
DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1432.828137] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1432.828420] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1432.828470] env[61964]: DEBUG nova.virt.hardware [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1432.829382] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ed1e60-31dc-44d0-a4d9-b20ad9459e37 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.838159] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb640b1b-17a7-4177-ab39-6095849f5dfb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.015550] env[61964]: DEBUG nova.policy [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02b47311e20c43d1a9aeca144d7b138c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0985be43dfdf4488bae7458257f1ced1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1433.638458] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Successfully updated port: b3b71fa4-f742-4837-8b4f-3880c2617115 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1433.660669] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquiring lock "refresh_cache-f99b3d5c-9f51-4815-9507-6522e57e715f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1433.663224] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 
tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquired lock "refresh_cache-f99b3d5c-9f51-4815-9507-6522e57e715f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1433.666035] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1433.809138] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1434.126776] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Successfully updated port: f72b6dbd-3644-42d8-b4bc-cdce88ebb187 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1434.147612] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "refresh_cache-36056842-1c0b-4f4c-a512-e250fc657620" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1434.147760] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired lock "refresh_cache-36056842-1c0b-4f4c-a512-e250fc657620" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1434.147905] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1434.301880] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Updating instance_info_cache with network_info: [{"id": "b3b71fa4-f742-4837-8b4f-3880c2617115", "address": "fa:16:3e:c5:5b:f6", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b71fa4-f7", "ovs_interfaceid": "b3b71fa4-f742-4837-8b4f-3880c2617115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.324320] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Releasing lock "refresh_cache-f99b3d5c-9f51-4815-9507-6522e57e715f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1434.324634] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Instance network_info: |[{"id": "b3b71fa4-f742-4837-8b4f-3880c2617115", "address": "fa:16:3e:c5:5b:f6", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b71fa4-f7", "ovs_interfaceid": "b3b71fa4-f742-4837-8b4f-3880c2617115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1434.325015] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:5b:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3b71fa4-f742-4837-8b4f-3880c2617115', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1434.334201] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Creating folder: Project (2b23d36132b844358e4741781b8c8420). Parent ref: group-v351942. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1434.334537] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cb452a4-deb7-4ce3-9866-103ec2749d86 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.337491] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1434.352139] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Created folder: Project (2b23d36132b844358e4741781b8c8420) in parent group-v351942. [ 1434.352139] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Creating folder: Instances. Parent ref: group-v351946. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1434.352139] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-129f95bc-d4be-4adf-9f3e-dc0af4e43e15 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.368029] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Created folder: Instances in parent group-v351946. [ 1434.368029] env[61964]: DEBUG oslo.service.loopingcall [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1434.368029] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1434.368029] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f149260-ddcf-4e64-8373-35a3511fc94a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.393493] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1434.393493] env[61964]: value = "task-1688559" [ 1434.393493] env[61964]: _type = "Task" [ 1434.393493] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.402125] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Successfully created port: 8447ec38-2062-4ef8-be01-90c5985b3692 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1434.410169] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688559, 'name': CreateVM_Task} progress is 6%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.907153] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688559, 'name': CreateVM_Task, 'duration_secs': 0.373755} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.907456] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1434.934337] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1434.934337] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1434.934337] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1434.934337] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5309fb62-66b2-46af-9afb-ea55d43f8f07 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.940692] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Waiting for the task: (returnval){ [ 1434.940692] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523a4c37-e042-556b-fd49-044197b34c31" [ 1434.940692] env[61964]: _type = "Task" [ 1434.940692] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.952021] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523a4c37-e042-556b-fd49-044197b34c31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.299311] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Updating instance_info_cache with network_info: [{"id": "f72b6dbd-3644-42d8-b4bc-cdce88ebb187", "address": "fa:16:3e:ac:45:30", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf72b6dbd-36", "ovs_interfaceid": "f72b6dbd-3644-42d8-b4bc-cdce88ebb187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.314289] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Releasing lock "refresh_cache-36056842-1c0b-4f4c-a512-e250fc657620" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1435.315028] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Instance network_info: |[{"id": "f72b6dbd-3644-42d8-b4bc-cdce88ebb187", "address": "fa:16:3e:ac:45:30", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", 
"external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf72b6dbd-36", "ovs_interfaceid": "f72b6dbd-3644-42d8-b4bc-cdce88ebb187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1435.315743] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:45:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f72b6dbd-3644-42d8-b4bc-cdce88ebb187', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1435.323810] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Creating folder: Project (3765bc7b039b4868a96b6ec336cb318a). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1435.324552] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-035a5d7e-3a5d-4f6b-a5be-0d2dbe6bc338 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.338689] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Created folder: Project (3765bc7b039b4868a96b6ec336cb318a) in parent group-v351942. [ 1435.338952] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Creating folder: Instances. Parent ref: group-v351949. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1435.339237] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0c6cab0-c3b9-450e-8fe5-5b965cf36ab3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.352543] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Created folder: Instances in parent group-v351949. [ 1435.352791] env[61964]: DEBUG oslo.service.loopingcall [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1435.352983] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1435.353206] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57d9de35-f4a6-4bb9-aac7-a50b40626d32 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.374945] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1435.374945] env[61964]: value = "task-1688562" [ 1435.374945] env[61964]: _type = "Task" [ 1435.374945] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.383282] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688562, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.394480] env[61964]: DEBUG nova.compute.manager [req-d7a9738d-8665-4ae9-97aa-00f814f5dd54 req-26865baf-085a-42d0-a801-16216415e22f service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Received event network-vif-plugged-b3b71fa4-f742-4837-8b4f-3880c2617115 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1435.394698] env[61964]: DEBUG oslo_concurrency.lockutils [req-d7a9738d-8665-4ae9-97aa-00f814f5dd54 req-26865baf-085a-42d0-a801-16216415e22f service nova] Acquiring lock "f99b3d5c-9f51-4815-9507-6522e57e715f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1435.394946] env[61964]: DEBUG oslo_concurrency.lockutils [req-d7a9738d-8665-4ae9-97aa-00f814f5dd54 req-26865baf-085a-42d0-a801-16216415e22f service nova] Lock "f99b3d5c-9f51-4815-9507-6522e57e715f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1435.395136] env[61964]: DEBUG oslo_concurrency.lockutils [req-d7a9738d-8665-4ae9-97aa-00f814f5dd54 req-26865baf-085a-42d0-a801-16216415e22f service nova] Lock "f99b3d5c-9f51-4815-9507-6522e57e715f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1435.395660] env[61964]: DEBUG nova.compute.manager [req-d7a9738d-8665-4ae9-97aa-00f814f5dd54 req-26865baf-085a-42d0-a801-16216415e22f service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] No waiting events found dispatching network-vif-plugged-b3b71fa4-f742-4837-8b4f-3880c2617115 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1435.395895] env[61964]: WARNING nova.compute.manager [req-d7a9738d-8665-4ae9-97aa-00f814f5dd54 req-26865baf-085a-42d0-a801-16216415e22f service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Received unexpected event network-vif-plugged-b3b71fa4-f742-4837-8b4f-3880c2617115 for instance with vm_state building and task_state spawning. 
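[editor's note] The CreateVM_Task records in this stretch (task-1688559 and task-1688562) follow oslo.vmware's usual invoke-then-poll pattern: Folder.CreateVM_Task returns a Task managed-object reference, and wait_for_task polls it (the "progress is 0%" lines) until vCenter reports completion and the duration is logged. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named session and pre-built folder_ref/config_spec/res_pool_ref references (these names are illustrative, not taken from the Nova code path shown here):

def create_vm(session, folder_ref, config_spec, res_pool_ref):
    # Folder.CreateVM_Task returns a Task managed-object reference ...
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=res_pool_ref)
    # ... which wait_for_task() polls until the task succeeds or raises on error.
    task_info = session.wait_for_task(task)
    return task_info.result  # managed-object reference of the new VM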
[ 1435.452908] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523a4c37-e042-556b-fd49-044197b34c31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.885959] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688562, 'name': CreateVM_Task, 'duration_secs': 0.42595} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.885959] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1435.886970] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1435.955938] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523a4c37-e042-556b-fd49-044197b34c31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.375840] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Successfully updated port: 8447ec38-2062-4ef8-be01-90c5985b3692 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1436.386029] env[61964]: DEBUG nova.compute.manager [req-073f55c8-8243-44e7-a798-5ffa9c08234d req-c514003c-a15e-4f99-8b38-83cb192ff4f6 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Received event network-vif-plugged-f72b6dbd-3644-42d8-b4bc-cdce88ebb187 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1436.386029] env[61964]: DEBUG oslo_concurrency.lockutils [req-073f55c8-8243-44e7-a798-5ffa9c08234d req-c514003c-a15e-4f99-8b38-83cb192ff4f6 service nova] Acquiring lock "36056842-1c0b-4f4c-a512-e250fc657620-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1436.386029] env[61964]: DEBUG oslo_concurrency.lockutils [req-073f55c8-8243-44e7-a798-5ffa9c08234d req-c514003c-a15e-4f99-8b38-83cb192ff4f6 service nova] Lock "36056842-1c0b-4f4c-a512-e250fc657620-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1436.386029] env[61964]: DEBUG oslo_concurrency.lockutils [req-073f55c8-8243-44e7-a798-5ffa9c08234d req-c514003c-a15e-4f99-8b38-83cb192ff4f6 service nova] Lock "36056842-1c0b-4f4c-a512-e250fc657620-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1436.386533] env[61964]: DEBUG nova.compute.manager [req-073f55c8-8243-44e7-a798-5ffa9c08234d req-c514003c-a15e-4f99-8b38-83cb192ff4f6 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] No waiting events found dispatching network-vif-plugged-f72b6dbd-3644-42d8-b4bc-cdce88ebb187 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1436.387198] env[61964]: WARNING nova.compute.manager [req-073f55c8-8243-44e7-a798-5ffa9c08234d req-c514003c-a15e-4f99-8b38-83cb192ff4f6 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Received unexpected event network-vif-plugged-f72b6dbd-3644-42d8-b4bc-cdce88ebb187 for instance with vm_state building and task_state spawning. [ 1436.391548] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquiring lock "refresh_cache-41046d8c-861e-4bb4-8f7f-ae7c4d494964" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1436.391787] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquired lock "refresh_cache-41046d8c-861e-4bb4-8f7f-ae7c4d494964" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1436.393442] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1436.462247] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1436.462247] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1436.462247] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1436.462247] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1436.463067] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1436.463067] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11344e0b-94c9-46a9-bcc6-cac65c79af66 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.471034] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for the task: (returnval){ [ 1436.471034] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52946851-a0de-3897-669d-59991d838f17" [ 1436.471034] env[61964]: _type = "Task" [ 1436.471034] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.479960] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52946851-a0de-3897-669d-59991d838f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.677490] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1436.989383] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52946851-a0de-3897-669d-59991d838f17, 'name': SearchDatastore_Task} progress is 0%. 
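Editor's note: the image-cache lock plus the SearchDatastore_Task calls above are the fetch-if-missing pattern for the per-datastore image cache: lock the cache entry, search the datastore for the cached VMDK, and only download when it is absent. A stripped-down sketch under those assumptions; search_datastore and fetch_image are hypothetical stand-ins:

import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)

def fetch_image_if_missing(datastore, image_id, search_datastore, fetch_image):
    # Lock name mirrors the "[datastore1] devstack-image-cache_base/..." locks in the log.
    cache_path = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
        datastore, image_id, image_id)
    with _cache_locks[cache_path]:
        if search_datastore(cache_path):    # cached copy already present
            return cache_path
        fetch_image(image_id, cache_path)   # otherwise populate the cache once
        return cache_path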
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.224920] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Updating instance_info_cache with network_info: [{"id": "8447ec38-2062-4ef8-be01-90c5985b3692", "address": "fa:16:3e:5f:79:89", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8447ec38-20", "ovs_interfaceid": "8447ec38-2062-4ef8-be01-90c5985b3692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.252736] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Releasing lock "refresh_cache-41046d8c-861e-4bb4-8f7f-ae7c4d494964" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1437.252736] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Instance network_info: |[{"id": "8447ec38-2062-4ef8-be01-90c5985b3692", "address": "fa:16:3e:5f:79:89", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8447ec38-20", "ovs_interfaceid": "8447ec38-2062-4ef8-be01-90c5985b3692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1437.253315] env[61964]: DEBUG 
nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:79:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8447ec38-2062-4ef8-be01-90c5985b3692', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1437.263670] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Creating folder: Project (0985be43dfdf4488bae7458257f1ced1). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1437.264371] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50fb85b4-9bf4-44d5-ba47-b21b5a9afac1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.281020] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Created folder: Project (0985be43dfdf4488bae7458257f1ced1) in parent group-v351942. [ 1437.281248] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Creating folder: Instances. Parent ref: group-v351952. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1437.281496] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1511c8e8-58d8-40d1-aa3f-665bf20e7200 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.292682] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Created folder: Instances in parent group-v351952. [ 1437.292930] env[61964]: DEBUG oslo.service.loopingcall [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.293135] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1437.293337] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b469c76-46ac-4b7e-a2e3-1ffeda46d8e3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.319952] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1437.319952] env[61964]: value = "task-1688565" [ 1437.319952] env[61964]: _type = "Task" [ 1437.319952] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.328892] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688565, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.482025] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52946851-a0de-3897-669d-59991d838f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.832360] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688565, 'name': CreateVM_Task} progress is 6%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.983019] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52946851-a0de-3897-669d-59991d838f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.331904] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688565, 'name': CreateVM_Task} progress is 15%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.481456] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1438.481726] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.481953] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1438.838159] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688565, 'name': CreateVM_Task, 'duration_secs': 1.311918} completed successfully. 
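Editor's note: CreateVM_Task above is driven by a wait/poll loop: the API layer logs "Waiting for the task", polls progress, and reports completion with a duration (0.42595 s and 1.311918 s for the two VMs here). A generic polling sketch, assuming a hypothetical get_task_info callable rather than the real vSphere PropertyCollector plumbing:

import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds or fails.

    get_task_info is assumed to return a dict like {'state': 'running',
    'progress': 15}, {'state': 'success'} or {'state': 'error', 'error': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # Mirrors the "progress is N%" debug lines in the log.
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")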
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.838364] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1438.839031] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1438.839193] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1438.839499] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1438.839753] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88500c0d-9897-4dee-be8c-2644c240aeca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.844896] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Waiting for the task: (returnval){ [ 1438.844896] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52cfd137-e1f2-ce07-8200-1d38e9f3a227" [ 1438.844896] env[61964]: _type = "Task" [ 1438.844896] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.862652] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1438.862652] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.862652] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1440.363823] env[61964]: DEBUG nova.compute.manager [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Received event network-changed-b3b71fa4-f742-4837-8b4f-3880c2617115 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1440.364417] env[61964]: DEBUG nova.compute.manager [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Refreshing instance network info cache due to event network-changed-b3b71fa4-f742-4837-8b4f-3880c2617115. 
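Editor's note: the network-changed handler above re-fetches the port's info while holding a "refresh_cache-<instance_uuid>" lock, so event handlers and the build path don't race on the instance's cached network_info. A minimal sketch of that lock-then-refresh pattern with a hypothetical cache and fetch function:

import threading
from collections import defaultdict

_refresh_locks = defaultdict(threading.Lock)
_nw_info_cache = {}  # instance_uuid -> list of VIF dicts

def refresh_instance_nw_info(instance_uuid, port_id, fetch_port_info):
    # fetch_port_info stands in for the Neutron query done on network-changed;
    # it is assumed to return a VIF dict whose "id" equals port_id.
    lock_name = "refresh_cache-%s" % instance_uuid   # naming seen in the log
    with _refresh_locks[lock_name]:
        vifs = _nw_info_cache.get(instance_uuid, [])
        fresh = fetch_port_info(port_id)
        vifs = [fresh if v.get("id") == port_id else v for v in vifs]
        if not any(v.get("id") == port_id for v in vifs):
            vifs.append(fresh)
        _nw_info_cache[instance_uuid] = vifs        # "Updated VIF entry" case
        return vifs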
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1440.364417] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Acquiring lock "refresh_cache-f99b3d5c-9f51-4815-9507-6522e57e715f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1440.364417] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Acquired lock "refresh_cache-f99b3d5c-9f51-4815-9507-6522e57e715f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1440.367530] env[61964]: DEBUG nova.network.neutron [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Refreshing network info cache for port b3b71fa4-f742-4837-8b4f-3880c2617115 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1440.888611] env[61964]: DEBUG nova.compute.manager [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Received event network-changed-f72b6dbd-3644-42d8-b4bc-cdce88ebb187 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1440.888812] env[61964]: DEBUG nova.compute.manager [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Refreshing instance network info cache due to event network-changed-f72b6dbd-3644-42d8-b4bc-cdce88ebb187. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1440.889057] env[61964]: DEBUG oslo_concurrency.lockutils [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] Acquiring lock "refresh_cache-36056842-1c0b-4f4c-a512-e250fc657620" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1440.889419] env[61964]: DEBUG oslo_concurrency.lockutils [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] Acquired lock "refresh_cache-36056842-1c0b-4f4c-a512-e250fc657620" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1440.889493] env[61964]: DEBUG nova.network.neutron [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Refreshing network info cache for port f72b6dbd-3644-42d8-b4bc-cdce88ebb187 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1441.222383] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1441.222802] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 
tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1441.241515] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1441.335922] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1441.336174] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1441.338203] env[61964]: INFO nova.compute.claims [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1441.402764] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.506131] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6722714e-4703-4a6d-bb5a-01d783e50836 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.510034] env[61964]: DEBUG nova.network.neutron [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Updated VIF entry in instance network info cache for port f72b6dbd-3644-42d8-b4bc-cdce88ebb187. 
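Editor's note: the lockutils lines report how long each caller waited for and then held a named lock (e.g. the "compute_resources" lock around instance_claim, released below after 0.273 s). Purely as an illustration of that wait/held accounting, a small wrapper around a plain threading.Lock:

import contextlib
import threading
import time

@contextlib.contextmanager
def timed_lock(lock, name, holder):
    """Log waited/held durations in the spirit of the lockutils lines above."""
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, holder, waited))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, holder, held))

compute_resources = threading.Lock()
with timed_lock(compute_resources, "compute_resources",
                "ResourceTracker.instance_claim"):
    pass  # the claim bookkeeping would happen here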
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1441.510034] env[61964]: DEBUG nova.network.neutron [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Updating instance_info_cache with network_info: [{"id": "f72b6dbd-3644-42d8-b4bc-cdce88ebb187", "address": "fa:16:3e:ac:45:30", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf72b6dbd-36", "ovs_interfaceid": "f72b6dbd-3644-42d8-b4bc-cdce88ebb187", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.516654] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5274f621-01b7-4621-bf86-336d1dd5689d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.524665] env[61964]: DEBUG oslo_concurrency.lockutils [req-99edb4b2-a2d5-4999-8819-9c0ba9794b0e req-58117ca5-5c49-4a31-9620-a4fd13b14936 service nova] Releasing lock "refresh_cache-36056842-1c0b-4f4c-a512-e250fc657620" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1441.551235] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da93d8fd-034b-429e-afe4-233070bd9341 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.564949] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a940daa-8b8b-468d-9ab8-f36d36599e1f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.581313] env[61964]: DEBUG nova.compute.provider_tree [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.594975] env[61964]: DEBUG nova.scheduler.client.report [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1441.609606] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.273s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1441.610113] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1441.649391] env[61964]: DEBUG nova.compute.utils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1441.650689] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1441.650864] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1441.673184] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1441.750437] env[61964]: DEBUG nova.network.neutron [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Updated VIF entry in instance network info cache for port b3b71fa4-f742-4837-8b4f-3880c2617115. 
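Editor's note: the inventory record above fixes the schedulable capacity of the provider; per resource class it is (total - reserved) * allocation_ratio, i.e. 48 * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MB and 400 GB of disk (existing allocations are ignored in this simplified view). A short check that the m1.nano-sized request claimed here (1 VCPU, 128 MB, 1 GB root) fits, using exactly the numbers logged:

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(inv):
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

request = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}  # m1.nano from the log
cap = capacity(inventory)
print(cap)   # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
print(all(request[rc] <= cap[rc] for rc in request))   # True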
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1441.750781] env[61964]: DEBUG nova.network.neutron [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Updating instance_info_cache with network_info: [{"id": "b3b71fa4-f742-4837-8b4f-3880c2617115", "address": "fa:16:3e:c5:5b:f6", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3b71fa4-f7", "ovs_interfaceid": "b3b71fa4-f742-4837-8b4f-3880c2617115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.764876] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Releasing lock "refresh_cache-f99b3d5c-9f51-4815-9507-6522e57e715f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1441.765153] env[61964]: DEBUG nova.compute.manager [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Received event network-vif-plugged-8447ec38-2062-4ef8-be01-90c5985b3692 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1441.765340] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Acquiring lock "41046d8c-861e-4bb4-8f7f-ae7c4d494964-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1441.769036] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Lock "41046d8c-861e-4bb4-8f7f-ae7c4d494964-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1441.769036] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Lock "41046d8c-861e-4bb4-8f7f-ae7c4d494964-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1441.769036] env[61964]: DEBUG nova.compute.manager 
[req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] No waiting events found dispatching network-vif-plugged-8447ec38-2062-4ef8-be01-90c5985b3692 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1441.769036] env[61964]: WARNING nova.compute.manager [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Received unexpected event network-vif-plugged-8447ec38-2062-4ef8-be01-90c5985b3692 for instance with vm_state building and task_state spawning. [ 1441.769410] env[61964]: DEBUG nova.compute.manager [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Received event network-changed-8447ec38-2062-4ef8-be01-90c5985b3692 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1441.769410] env[61964]: DEBUG nova.compute.manager [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Refreshing instance network info cache due to event network-changed-8447ec38-2062-4ef8-be01-90c5985b3692. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1441.769410] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Acquiring lock "refresh_cache-41046d8c-861e-4bb4-8f7f-ae7c4d494964" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1441.769410] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Acquired lock "refresh_cache-41046d8c-861e-4bb4-8f7f-ae7c4d494964" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1441.769410] env[61964]: DEBUG nova.network.neutron [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Refreshing network info cache for port 8447ec38-2062-4ef8-be01-90c5985b3692 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1441.772015] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1441.817029] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1441.817029] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1441.817029] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1441.817252] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1441.817252] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1441.817252] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1441.817252] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1441.817252] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1441.817424] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1441.817424] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1441.817424] env[61964]: DEBUG nova.virt.hardware [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1441.821026] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afaf61a-ea70-4578-9b93-4438d5844a8a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.828441] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0455a7-206f-4ee2-898f-6cd3328cf600 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.853649] env[61964]: DEBUG nova.policy [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd05d7091fed486c86fa2b488d19e2a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c63133f1af9f4cdca3ed330f5f6a4044', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1442.384639] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.384909] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1442.385050] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1442.416471] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Skipping network cache update for instance because it is Building. 
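Editor's note: the hardware.py lines walk through topology selection for m1.nano: with no flavor or image limits (0:0:0) the maxima default to 65536 each, only one topology exists for 1 vCPU, and VirtCPUTopology(cores=1,sockets=1,threads=1) is chosen. A simplified enumeration under those assumptions; this illustrates the idea, it is not Nova's _get_possible_cpu_topologies:

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))       # [(1, 1, 1)] -- matches the log
print(list(possible_topologies(4))[:4])   # several candidates for 4 vCPUs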
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1442.416768] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1442.417994] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1442.417994] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1442.417994] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1442.417994] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1443.045391] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Successfully created port: 1f9ead04-6813-450f-9eac-840d0aa4130a {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1443.115980] env[61964]: DEBUG nova.network.neutron [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Updated VIF entry in instance network info cache for port 8447ec38-2062-4ef8-be01-90c5985b3692. 
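Editor's note: the periodic _heal_instance_info_cache task rebuilds its candidate list and skips every instance still in the Building state, which is why the run above ends with "Didn't find any instances for network info cache update." A toy version of that filter, with made-up state records:

# Toy filter mirroring the periodic task's behaviour in the log.
instances = [
    {"uuid": "f99b3d5c-9f51-4815-9507-6522e57e715f", "vm_state": "building"},
    {"uuid": "36056842-1c0b-4f4c-a512-e250fc657620", "vm_state": "building"},
]

to_heal = [i for i in instances if i["vm_state"] != "building"]
if not to_heal:
    print("Didn't find any instances for network info cache update.")
else:
    for inst in to_heal:
        print("refreshing network info cache for", inst["uuid"])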
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1443.115980] env[61964]: DEBUG nova.network.neutron [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Updating instance_info_cache with network_info: [{"id": "8447ec38-2062-4ef8-be01-90c5985b3692", "address": "fa:16:3e:5f:79:89", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8447ec38-20", "ovs_interfaceid": "8447ec38-2062-4ef8-be01-90c5985b3692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.135392] env[61964]: DEBUG oslo_concurrency.lockutils [req-8a6abbb3-c42d-4437-b7a9-558dbcdd35d6 req-e73b84b1-95de-4a6f-bee8-4add61e62534 service nova] Releasing lock "refresh_cache-41046d8c-861e-4bb4-8f7f-ae7c4d494964" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1445.387011] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.602169] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Successfully updated port: 1f9ead04-6813-450f-9eac-840d0aa4130a {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1445.620119] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "refresh_cache-f2c66aa6-4406-4cfa-8a13-c382eebed6bc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1445.620270] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquired lock "refresh_cache-f2c66aa6-4406-4cfa-8a13-c382eebed6bc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1445.620418] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 
tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1445.729514] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1446.122303] env[61964]: DEBUG nova.compute.manager [req-bdd3b5d6-0cf9-4b21-ba93-bcb14bcdb10e req-390a962b-0c38-4c74-84bc-0eabcb29e088 service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Received event network-vif-plugged-1f9ead04-6813-450f-9eac-840d0aa4130a {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1446.124246] env[61964]: DEBUG oslo_concurrency.lockutils [req-bdd3b5d6-0cf9-4b21-ba93-bcb14bcdb10e req-390a962b-0c38-4c74-84bc-0eabcb29e088 service nova] Acquiring lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1446.124295] env[61964]: DEBUG oslo_concurrency.lockutils [req-bdd3b5d6-0cf9-4b21-ba93-bcb14bcdb10e req-390a962b-0c38-4c74-84bc-0eabcb29e088 service nova] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1446.124528] env[61964]: DEBUG oslo_concurrency.lockutils [req-bdd3b5d6-0cf9-4b21-ba93-bcb14bcdb10e req-390a962b-0c38-4c74-84bc-0eabcb29e088 service nova] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1446.124652] env[61964]: DEBUG nova.compute.manager [req-bdd3b5d6-0cf9-4b21-ba93-bcb14bcdb10e req-390a962b-0c38-4c74-84bc-0eabcb29e088 service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] No waiting events found dispatching network-vif-plugged-1f9ead04-6813-450f-9eac-840d0aa4130a {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1446.126056] env[61964]: WARNING nova.compute.manager [req-bdd3b5d6-0cf9-4b21-ba93-bcb14bcdb10e req-390a962b-0c38-4c74-84bc-0eabcb29e088 service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Received unexpected event network-vif-plugged-1f9ead04-6813-450f-9eac-840d0aa4130a for instance with vm_state building and task_state spawning. 
[ 1446.196194] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Updating instance_info_cache with network_info: [{"id": "1f9ead04-6813-450f-9eac-840d0aa4130a", "address": "fa:16:3e:02:cf:f2", "network": {"id": "3513ca3b-8d79-441b-b1e0-9edb1d4da1b4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-107780581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c63133f1af9f4cdca3ed330f5f6a4044", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f9ead04-68", "ovs_interfaceid": "1f9ead04-6813-450f-9eac-840d0aa4130a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.214565] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Releasing lock "refresh_cache-f2c66aa6-4406-4cfa-8a13-c382eebed6bc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1446.214888] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Instance network_info: |[{"id": "1f9ead04-6813-450f-9eac-840d0aa4130a", "address": "fa:16:3e:02:cf:f2", "network": {"id": "3513ca3b-8d79-441b-b1e0-9edb1d4da1b4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-107780581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c63133f1af9f4cdca3ed330f5f6a4044", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f9ead04-68", "ovs_interfaceid": "1f9ead04-6813-450f-9eac-840d0aa4130a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 
1446.215282] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:cf:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f9ead04-6813-450f-9eac-840d0aa4130a', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1446.226879] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Creating folder: Project (c63133f1af9f4cdca3ed330f5f6a4044). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1446.227569] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f73adee-83b1-447a-8739-8a191ea21da3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.241455] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Created folder: Project (c63133f1af9f4cdca3ed330f5f6a4044) in parent group-v351942. [ 1446.241685] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Creating folder: Instances. Parent ref: group-v351955. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1446.241941] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-137cb8bc-1915-4b37-a7d8-40f74bbc68d5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.255234] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Created folder: Instances in parent group-v351955. [ 1446.256081] env[61964]: DEBUG oslo.service.loopingcall [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1446.256081] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1446.256081] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90fecf79-68ef-48f0-b585-312f6155e939 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.286510] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1446.286510] env[61964]: value = "task-1688568" [ 1446.286510] env[61964]: _type = "Task" [ 1446.286510] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.299036] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688568, 'name': CreateVM_Task} progress is 5%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.381715] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1446.384488] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1446.384762] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1446.385013] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1446.385207] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1446.450454] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "c5dd385e-2447-4539-aed1-81d957076f5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1446.450754] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "c5dd385e-2447-4539-aed1-81d957076f5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1446.468205] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1446.543912] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1446.544187] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1446.545995] env[61964]: INFO nova.compute.claims [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1446.760861] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf477849-3e5a-4260-8d9b-a23d58d016bb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.772055] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e0648c-f8f3-4429-8d56-fed8e04cded3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.812567] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0915c77b-0054-45a4-aad1-2cf2ae45fbfc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.828853] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f0bfcdc9-dcc8-4384-8e1a-5bb4b849af2b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.833980] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688568, 'name': CreateVM_Task, 'duration_secs': 0.323533} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.833980] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1446.834909] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1446.835377] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1446.835715] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1446.844484] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b315412-12ac-4897-8c5d-8b349cefdf3b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.846535] env[61964]: DEBUG nova.compute.provider_tree [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1446.851216] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Waiting for the task: (returnval){ [ 1446.851216] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f9c467-f483-3ca5-aedf-fc2e8b69a96e" [ 1446.851216] env[61964]: _type = "Task" [ 1446.851216] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.861772] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f9c467-f483-3ca5-aedf-fc2e8b69a96e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.862794] env[61964]: DEBUG nova.scheduler.client.report [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1446.887659] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1446.889256] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1446.941678] env[61964]: DEBUG nova.compute.utils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.943593] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1446.947118] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1446.958131] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1447.070623] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1447.105896] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1447.106244] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1447.106244] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1447.106561] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1447.106561] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1447.106561] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1447.106782] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1447.107114] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1447.107114] 
env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1447.107327] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1447.108102] env[61964]: DEBUG nova.virt.hardware [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1447.109056] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929ae990-39c9-4d7c-8eb4-5411f56a1428 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.118810] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b71d2ab-e708-437c-a222-6618a91d828c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.158052] env[61964]: DEBUG nova.policy [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '150a91a845b944a1907ba847ba2ac447', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c08b3d1d934b7ca7754ddf3411da7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1447.362392] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1447.362696] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1447.362955] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1447.383683] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.359683] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Successfully created port: 906f4c29-8823-4ee0-9122-0fd429a98586 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1448.750361] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "b8993737-d2ef-4987-8c91-d1320771434a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1448.750967] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "b8993737-d2ef-4987-8c91-d1320771434a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1448.768676] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1448.847127] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1448.847127] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1448.847127] env[61964]: INFO nova.compute.claims [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1449.100877] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03de0c49-58f2-4981-b5ea-5f04dd45bf41 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.109378] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b590e5a3-a141-423e-bea1-77dd4ed7626f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.161634] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b4e44c-bfe9-4022-8644-8c469c8c35d7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.167276] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1449.167520] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1449.177151] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6620e02d-785a-4a3c-8e9f-f057d43e77d2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.197873] env[61964]: DEBUG nova.compute.provider_tree [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] 
Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.199682] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1449.211240] env[61964]: DEBUG nova.scheduler.client.report [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1449.240512] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.395s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1449.241079] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1449.293014] env[61964]: DEBUG nova.compute.utils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1449.294575] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1449.294742] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1449.315438] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1449.322436] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1449.322728] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1449.329324] env[61964]: INFO nova.compute.claims [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1449.384401] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1449.414758] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1449.419591] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1449.451441] env[61964]: DEBUG nova.policy [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '606c91ad1df742fa8af0648ec453df57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fff335fd73e441f0ac0f6e36f339abe2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1449.454756] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1449.455134] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1449.455615] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1449.455712] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1449.455893] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1449.456083] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1449.456346] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1449.456549] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1449.456826] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1449.457129] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1449.458046] env[61964]: DEBUG nova.virt.hardware [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1449.458905] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6cfc88-c12f-4f2e-87c2-bdb86065d00c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.470138] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9ac310-70f1-4909-ba8c-baa0240a0016 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.533089] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0591329b-54e0-471b-b4f2-9304b8eb5494 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.543698] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ab6c1e-cb69-423e-aefb-26f0809d2e2e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.592502] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112ee46c-e0b3-474a-90d8-3384ab8a16f9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.606381] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76369b7a-ba96-41de-b0c8-3a75b641ebf4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.632546] env[61964]: DEBUG 
nova.compute.provider_tree [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.648047] env[61964]: DEBUG nova.scheduler.client.report [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1449.683309] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.361s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1449.683813] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1449.686864] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.273s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1449.687368] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1449.687368] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1449.692537] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474e09db-86a8-4de1-ba14-f33c7429c6ba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.707736] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d36b52a-8e80-4bed-8503-fe4859bca5ae {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.723413] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e078415-0eda-4891-826d-5dbf6a518e12 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.730950] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20277929-54bc-45d7-b0d4-8b633df18499 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.769293] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181304MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1449.769293] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1449.769505] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1449.772301] env[61964]: DEBUG nova.compute.utils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1449.774072] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1449.774072] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1449.793180] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1449.857363] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f99b3d5c-9f51-4815-9507-6522e57e715f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858222] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 408cfd06-df36-46a4-9a6c-86dc91339712 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858222] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 36056842-1c0b-4f4c-a512-e250fc657620 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858222] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 41046d8c-861e-4bb4-8f7f-ae7c4d494964 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858222] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f2c66aa6-4406-4cfa-8a13-c382eebed6bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858456] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c5dd385e-2447-4539-aed1-81d957076f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858456] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8993737-d2ef-4987-8c91-d1320771434a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858456] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.858456] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1449.858582] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1449.892667] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1449.925623] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1449.925712] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1449.926063] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1449.926063] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1449.926180] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1449.926376] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1449.926550] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1449.926790] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1449.926853] 
env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1449.926997] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1449.927483] env[61964]: DEBUG nova.virt.hardware [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1449.928425] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09a68f0-d40a-4f6e-bed3-076b73df2da7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.941812] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Successfully updated port: 906f4c29-8823-4ee0-9122-0fd429a98586 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.950409] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f342d1a3-6755-48af-8a74-4b0e1ad3883d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.974216] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "refresh_cache-c5dd385e-2447-4539-aed1-81d957076f5f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1449.974216] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired lock "refresh_cache-c5dd385e-2447-4539-aed1-81d957076f5f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1449.974216] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1450.047128] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1450.056247] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4f2a6e-566d-49c9-9b4d-cca9ffea2be9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.064034] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7374f8a-2fd9-4028-b398-9cffddd1097d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.098299] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacf4c84-a1e4-41de-852d-ab9de80b4bd6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.106201] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb70dae-318a-4a54-8a2f-adc0be713d13 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.121309] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.138702] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1450.143029] env[61964]: DEBUG nova.compute.manager [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Received event network-changed-1f9ead04-6813-450f-9eac-840d0aa4130a {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1450.143029] env[61964]: DEBUG nova.compute.manager [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Refreshing instance network info cache due to event network-changed-1f9ead04-6813-450f-9eac-840d0aa4130a. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1450.143225] env[61964]: DEBUG oslo_concurrency.lockutils [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] Acquiring lock "refresh_cache-f2c66aa6-4406-4cfa-8a13-c382eebed6bc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1450.143304] env[61964]: DEBUG oslo_concurrency.lockutils [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] Acquired lock "refresh_cache-f2c66aa6-4406-4cfa-8a13-c382eebed6bc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1450.143510] env[61964]: DEBUG nova.network.neutron [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Refreshing network info cache for port 1f9ead04-6813-450f-9eac-840d0aa4130a {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1450.167605] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1450.168348] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.398s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1450.320515] env[61964]: DEBUG nova.policy [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '150a91a845b944a1907ba847ba2ac447', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c08b3d1d934b7ca7754ddf3411da7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1450.443748] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Updating instance_info_cache with network_info: [{"id": "906f4c29-8823-4ee0-9122-0fd429a98586", "address": "fa:16:3e:d6:72:ec", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906f4c29-88", "ovs_interfaceid": "906f4c29-8823-4ee0-9122-0fd429a98586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.458480] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Releasing lock "refresh_cache-c5dd385e-2447-4539-aed1-81d957076f5f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1450.463017] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Instance network_info: |[{"id": "906f4c29-8823-4ee0-9122-0fd429a98586", "address": "fa:16:3e:d6:72:ec", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906f4c29-88", "ovs_interfaceid": "906f4c29-8823-4ee0-9122-0fd429a98586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1450.463178] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:72:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '906f4c29-8823-4ee0-9122-0fd429a98586', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1450.472756] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating folder: Project (33c08b3d1d934b7ca7754ddf3411da7e). Parent ref: group-v351942. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1450.473497] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5be7815b-965a-404b-946b-24b3f2d2e167 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.489349] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Created folder: Project (33c08b3d1d934b7ca7754ddf3411da7e) in parent group-v351942. [ 1450.489349] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating folder: Instances. Parent ref: group-v351958. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1450.489349] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9a84475-36ea-408e-861a-ed2fc38a3a63 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.499066] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Created folder: Instances in parent group-v351958. [ 1450.500711] env[61964]: DEBUG oslo.service.loopingcall [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.500711] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1450.500711] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a0f86c6-ec3e-4dc1-979a-1c1b361cb63f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.529026] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1450.529026] env[61964]: value = "task-1688571" [ 1450.529026] env[61964]: _type = "Task" [ 1450.529026] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.535778] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688571, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.040906] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688571, 'name': CreateVM_Task, 'duration_secs': 0.333613} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.042180] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1451.042938] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1451.043116] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1451.043435] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1451.043767] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44e5b7c4-79da-44fc-aa59-49e51e3b2543 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.052041] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1451.052041] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52de09c6-c26c-3780-c7e5-2fa5a6885ae2" [ 1451.052041] env[61964]: _type = "Task" [ 1451.052041] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.064262] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52de09c6-c26c-3780-c7e5-2fa5a6885ae2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.273838] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Successfully created port: 4504922d-705f-44f5-9c79-abf1a5fe512e {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1451.565391] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1451.565786] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1451.566029] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1451.656348] env[61964]: DEBUG nova.network.neutron [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Updated VIF entry in instance network info cache for port 1f9ead04-6813-450f-9eac-840d0aa4130a. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1451.656719] env[61964]: DEBUG nova.network.neutron [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Updating instance_info_cache with network_info: [{"id": "1f9ead04-6813-450f-9eac-840d0aa4130a", "address": "fa:16:3e:02:cf:f2", "network": {"id": "3513ca3b-8d79-441b-b1e0-9edb1d4da1b4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-107780581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c63133f1af9f4cdca3ed330f5f6a4044", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f9ead04-68", "ovs_interfaceid": "1f9ead04-6813-450f-9eac-840d0aa4130a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.667634] env[61964]: DEBUG oslo_concurrency.lockutils [req-b82d8099-191f-4726-b66e-3c67625fe3ea req-d875b58f-eae0-4628-bce4-dbbae9aa1faf service nova] Releasing lock "refresh_cache-f2c66aa6-4406-4cfa-8a13-c382eebed6bc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1452.011357] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Successfully created port: 2306e5d1-233c-4c7b-9ead-f37ef4edab6c {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1452.684981] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1452.685322] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1452.697795] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 
tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1452.771342] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1452.771461] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1452.773503] env[61964]: INFO nova.compute.claims [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1452.968125] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efc031c-c6e3-40c2-8b70-0e3e2fe98bc1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.976734] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55f99f8-a277-48e7-8b60-70301339503f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.013408] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb201e8-7839-4533-8bb3-cc89d33286cf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.023238] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791fcd2c-c7ca-44d8-a375-e01d2fe2a9e4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.039803] env[61964]: DEBUG nova.compute.provider_tree [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.052706] env[61964]: DEBUG nova.scheduler.client.report [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1453.076947] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1453.077320] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1453.133796] env[61964]: DEBUG nova.compute.utils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1453.135015] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1453.135785] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1453.162716] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1453.317204] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1453.399488] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:40:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1205527224',id=23,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-803501040',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1453.399488] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1453.399488] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1453.399803] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1453.400016] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1453.400234] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1453.400556] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1453.400622] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 
tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1453.400775] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1453.400937] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1453.401122] env[61964]: DEBUG nova.virt.hardware [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1453.402498] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b51b44-3e02-4c99-9722-05c1a1865ec7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.417770] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5093abf8-c228-480b-b0e4-58630ac90906 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.434812] env[61964]: DEBUG nova.policy [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '090d5ea8d17c4a94a954df6ee6b6dddb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6379657dbb24d02b267548c34f5b73d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1453.961934] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1453.962244] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
1453.979716] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1454.047949] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1454.048298] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1454.051069] env[61964]: INFO nova.compute.claims [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1454.389295] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a7ff3d-8a87-44dd-b1e7-7e5967d21dd9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.398505] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c068eb4-3311-4884-988c-56c7e07e69c0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.432439] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc0f7c9-df56-4c45-8d69-f8bbd752bb6e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.440258] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bad9b3-a71d-4de3-abad-f8d48fa55f36 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.454714] env[61964]: DEBUG nova.compute.provider_tree [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1454.473191] env[61964]: DEBUG nova.scheduler.client.report [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1454.500409] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.452s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1454.501564] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1454.517510] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Successfully updated port: 4504922d-705f-44f5-9c79-abf1a5fe512e {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1454.536070] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "refresh_cache-b8993737-d2ef-4987-8c91-d1320771434a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1454.536223] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquired lock "refresh_cache-b8993737-d2ef-4987-8c91-d1320771434a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1454.536352] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1454.567467] env[61964]: DEBUG nova.compute.utils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1454.569040] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Allocating IP information in the background. 
{{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1454.569215] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1454.585853] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1454.671184] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1454.705582] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1454.705868] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1454.706051] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1454.706184] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1454.706328] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1454.706475] 
env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1454.706683] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1454.706841] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1454.707010] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1454.707611] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1454.707611] env[61964]: DEBUG nova.virt.hardware [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1454.708539] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b84f51-64a0-41d8-8d46-836026951dbb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.718168] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fd5fcf-9c20-4800-8b4e-71a22634e557 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.726391] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1454.888256] env[61964]: DEBUG nova.compute.manager [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Received event network-vif-plugged-906f4c29-8823-4ee0-9122-0fd429a98586 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1454.888256] env[61964]: DEBUG oslo_concurrency.lockutils [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] Acquiring lock "c5dd385e-2447-4539-aed1-81d957076f5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1454.888387] env[61964]: DEBUG oslo_concurrency.lockutils [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] Lock "c5dd385e-2447-4539-aed1-81d957076f5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1454.888538] env[61964]: DEBUG oslo_concurrency.lockutils [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] Lock "c5dd385e-2447-4539-aed1-81d957076f5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1454.888704] env[61964]: DEBUG nova.compute.manager [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] No waiting events found dispatching network-vif-plugged-906f4c29-8823-4ee0-9122-0fd429a98586 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1454.888862] env[61964]: WARNING nova.compute.manager [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Received unexpected event network-vif-plugged-906f4c29-8823-4ee0-9122-0fd429a98586 for instance with vm_state building and task_state spawning. [ 1454.890074] env[61964]: DEBUG nova.compute.manager [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Received event network-changed-906f4c29-8823-4ee0-9122-0fd429a98586 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1454.890074] env[61964]: DEBUG nova.compute.manager [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Refreshing instance network info cache due to event network-changed-906f4c29-8823-4ee0-9122-0fd429a98586. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1454.890074] env[61964]: DEBUG oslo_concurrency.lockutils [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] Acquiring lock "refresh_cache-c5dd385e-2447-4539-aed1-81d957076f5f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1454.890351] env[61964]: DEBUG oslo_concurrency.lockutils [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] Acquired lock "refresh_cache-c5dd385e-2447-4539-aed1-81d957076f5f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1454.890383] env[61964]: DEBUG nova.network.neutron [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Refreshing network info cache for port 906f4c29-8823-4ee0-9122-0fd429a98586 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1455.005042] env[61964]: DEBUG nova.policy [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55167225cfa24283940bf6dc3dbd6e90', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2718eda15f54774b19418cb62149ba8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1455.037780] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Successfully updated port: 2306e5d1-233c-4c7b-9ead-f37ef4edab6c {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.056829] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "refresh_cache-31e6ec86-cf5a-438c-ad8f-aad775fbb376" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1455.058573] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired lock "refresh_cache-31e6ec86-cf5a-438c-ad8f-aad775fbb376" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1455.058707] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1455.238859] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 
tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1455.252565] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Updating instance_info_cache with network_info: [{"id": "4504922d-705f-44f5-9c79-abf1a5fe512e", "address": "fa:16:3e:65:5e:e7", "network": {"id": "521e43ba-8a46-4ad9-b85a-c9d67226275d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2002236805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fff335fd73e441f0ac0f6e36f339abe2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4504922d-70", "ovs_interfaceid": "4504922d-705f-44f5-9c79-abf1a5fe512e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.272828] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Releasing lock "refresh_cache-b8993737-d2ef-4987-8c91-d1320771434a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1455.273548] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Instance network_info: |[{"id": "4504922d-705f-44f5-9c79-abf1a5fe512e", "address": "fa:16:3e:65:5e:e7", "network": {"id": "521e43ba-8a46-4ad9-b85a-c9d67226275d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2002236805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fff335fd73e441f0ac0f6e36f339abe2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4504922d-70", "ovs_interfaceid": "4504922d-705f-44f5-9c79-abf1a5fe512e", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1455.273647] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:5e:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1c8fdf9-970c-4ae0-b6d9-f1015196b552', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4504922d-705f-44f5-9c79-abf1a5fe512e', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1455.283789] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Creating folder: Project (fff335fd73e441f0ac0f6e36f339abe2). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1455.286951] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38ac3225-a2b9-412b-8a10-a3d0169a318f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.302078] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Created folder: Project (fff335fd73e441f0ac0f6e36f339abe2) in parent group-v351942. [ 1455.302185] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Creating folder: Instances. Parent ref: group-v351961. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1455.303234] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Successfully created port: 10994424-d6d5-4ec7-832e-9ab89826ed6b {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1455.306351] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a81de07a-054e-464f-a7ae-916aa16ac742 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.316779] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Created folder: Instances in parent group-v351961. [ 1455.318038] env[61964]: DEBUG oslo.service.loopingcall [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1455.318038] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1455.318038] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e39214f6-2b0e-4a00-9d37-7047b72dc2f4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.355298] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1455.355298] env[61964]: value = "task-1688574" [ 1455.355298] env[61964]: _type = "Task" [ 1455.355298] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.362986] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688574, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.645626] env[61964]: DEBUG nova.compute.manager [req-14cd5350-8929-4d59-a1a2-e56ea40e62f0 req-671be327-e523-4470-b197-28887a317b45 service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Received event network-vif-plugged-4504922d-705f-44f5-9c79-abf1a5fe512e {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1455.646717] env[61964]: DEBUG oslo_concurrency.lockutils [req-14cd5350-8929-4d59-a1a2-e56ea40e62f0 req-671be327-e523-4470-b197-28887a317b45 service nova] Acquiring lock "b8993737-d2ef-4987-8c91-d1320771434a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1455.646717] env[61964]: DEBUG oslo_concurrency.lockutils [req-14cd5350-8929-4d59-a1a2-e56ea40e62f0 req-671be327-e523-4470-b197-28887a317b45 service nova] Lock "b8993737-d2ef-4987-8c91-d1320771434a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1455.646717] env[61964]: DEBUG oslo_concurrency.lockutils [req-14cd5350-8929-4d59-a1a2-e56ea40e62f0 req-671be327-e523-4470-b197-28887a317b45 service nova] Lock "b8993737-d2ef-4987-8c91-d1320771434a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1455.646717] env[61964]: DEBUG nova.compute.manager [req-14cd5350-8929-4d59-a1a2-e56ea40e62f0 req-671be327-e523-4470-b197-28887a317b45 service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] No waiting events found dispatching network-vif-plugged-4504922d-705f-44f5-9c79-abf1a5fe512e {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1455.647194] env[61964]: WARNING nova.compute.manager [req-14cd5350-8929-4d59-a1a2-e56ea40e62f0 req-671be327-e523-4470-b197-28887a317b45 service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Received unexpected event network-vif-plugged-4504922d-705f-44f5-9c79-abf1a5fe512e for instance with vm_state building and task_state spawning. 
[ 1455.728113] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Updating instance_info_cache with network_info: [{"id": "2306e5d1-233c-4c7b-9ead-f37ef4edab6c", "address": "fa:16:3e:f6:94:b8", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2306e5d1-23", "ovs_interfaceid": "2306e5d1-233c-4c7b-9ead-f37ef4edab6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.750118] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Releasing lock "refresh_cache-31e6ec86-cf5a-438c-ad8f-aad775fbb376" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1455.750455] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Instance network_info: |[{"id": "2306e5d1-233c-4c7b-9ead-f37ef4edab6c", "address": "fa:16:3e:f6:94:b8", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2306e5d1-23", "ovs_interfaceid": "2306e5d1-233c-4c7b-9ead-f37ef4edab6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1455.751120] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 
tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:94:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2306e5d1-233c-4c7b-9ead-f37ef4edab6c', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1455.759516] env[61964]: DEBUG oslo.service.loopingcall [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1455.759935] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1455.764343] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0a88e3b-d078-4201-8f15-1e3c94271f51 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.784715] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1455.784715] env[61964]: value = "task-1688575" [ 1455.784715] env[61964]: _type = "Task" [ 1455.784715] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.793210] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688575, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.871437] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688574, 'name': CreateVM_Task, 'duration_secs': 0.334396} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.871828] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1455.873027] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1455.873027] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1455.873284] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1455.873450] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95a97e2a-8768-4dcc-bf8a-f542ce0e648f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.879635] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Waiting for the task: (returnval){ [ 1455.879635] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5248549d-11e1-2c0d-e6cc-26cf5b8d6d88" [ 1455.879635] env[61964]: _type = "Task" [ 1455.879635] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.889934] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5248549d-11e1-2c0d-e6cc-26cf5b8d6d88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.296850] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688575, 'name': CreateVM_Task, 'duration_secs': 0.334628} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.297174] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1456.297745] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1456.391151] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1456.391259] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1456.391404] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1456.391919] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1456.391919] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1456.392185] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fcb8c71-beca-4f42-b8d3-01934a361401 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.397424] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1456.397424] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f27233-2ae4-362d-778d-140dcccb7cd9" [ 1456.397424] env[61964]: _type = "Task" [ 1456.397424] env[61964]: } to 
complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.409549] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f27233-2ae4-362d-778d-140dcccb7cd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.495120] env[61964]: DEBUG nova.network.neutron [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Updated VIF entry in instance network info cache for port 906f4c29-8823-4ee0-9122-0fd429a98586. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1456.495120] env[61964]: DEBUG nova.network.neutron [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Updating instance_info_cache with network_info: [{"id": "906f4c29-8823-4ee0-9122-0fd429a98586", "address": "fa:16:3e:d6:72:ec", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906f4c29-88", "ovs_interfaceid": "906f4c29-8823-4ee0-9122-0fd429a98586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.512046] env[61964]: DEBUG oslo_concurrency.lockutils [req-ea8b4296-fc0b-46e7-97b1-3263abd26d9d req-c139411f-1eba-4b70-a8c8-bce135b4f85b service nova] Releasing lock "refresh_cache-c5dd385e-2447-4539-aed1-81d957076f5f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1456.911864] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1456.911864] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1456.913171] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1456.959281] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Successfully created port: e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1458.226252] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Successfully updated port: 10994424-d6d5-4ec7-832e-9ab89826ed6b {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1458.248988] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "refresh_cache-2325430a-6b1a-41d9-bc13-fd7d98e07e9e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1458.248988] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquired lock "refresh_cache-2325430a-6b1a-41d9-bc13-fd7d98e07e9e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1458.248988] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1458.386230] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1458.485185] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1458.485763] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1459.883650] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Updating instance_info_cache with network_info: [{"id": "10994424-d6d5-4ec7-832e-9ab89826ed6b", "address": "fa:16:3e:d7:b8:1f", "network": {"id": "d9fd8686-66d8-4479-98c2-cdd9e6b436ed", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2137987265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6379657dbb24d02b267548c34f5b73d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10994424-d6", "ovs_interfaceid": "10994424-d6d5-4ec7-832e-9ab89826ed6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.897524] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Releasing lock "refresh_cache-2325430a-6b1a-41d9-bc13-fd7d98e07e9e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1459.897615] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Instance network_info: |[{"id": "10994424-d6d5-4ec7-832e-9ab89826ed6b", "address": "fa:16:3e:d7:b8:1f", "network": {"id": "d9fd8686-66d8-4479-98c2-cdd9e6b436ed", "bridge": "br-int", "label": 
"tempest-ServersWithSpecificFlavorTestJSON-2137987265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6379657dbb24d02b267548c34f5b73d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10994424-d6", "ovs_interfaceid": "10994424-d6d5-4ec7-832e-9ab89826ed6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1459.897974] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:b8:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10994424-d6d5-4ec7-832e-9ab89826ed6b', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1459.909209] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Creating folder: Project (d6379657dbb24d02b267548c34f5b73d). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1459.909724] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ec71ce6-dd01-4a86-8d65-4b46754c6b7d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.922687] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Created folder: Project (d6379657dbb24d02b267548c34f5b73d) in parent group-v351942. [ 1459.922863] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Creating folder: Instances. Parent ref: group-v351965. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1459.923817] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e51d0bc-0efe-4835-9f74-20c7a5c36cf6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.936448] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Created folder: Instances in parent group-v351965. [ 1459.936448] env[61964]: DEBUG oslo.service.loopingcall [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.936448] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1459.936448] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-445235d4-7407-4741-82c6-ed2e25fdf1ee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.959746] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1459.959746] env[61964]: value = "task-1688578" [ 1459.959746] env[61964]: _type = "Task" [ 1459.959746] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.973332] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688578, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.240571] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Received event network-vif-plugged-2306e5d1-233c-4c7b-9ead-f37ef4edab6c {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1460.240571] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Acquiring lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1460.240571] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1460.240947] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1460.240947] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] No waiting events found dispatching network-vif-plugged-2306e5d1-233c-4c7b-9ead-f37ef4edab6c {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1460.240947] env[61964]: WARNING nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Received unexpected event network-vif-plugged-2306e5d1-233c-4c7b-9ead-f37ef4edab6c for instance with vm_state building and task_state spawning. [ 1460.241319] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Received event network-changed-4504922d-705f-44f5-9c79-abf1a5fe512e {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1460.241319] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Refreshing instance network info cache due to event network-changed-4504922d-705f-44f5-9c79-abf1a5fe512e. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1460.241438] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Acquiring lock "refresh_cache-b8993737-d2ef-4987-8c91-d1320771434a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1460.241539] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Acquired lock "refresh_cache-b8993737-d2ef-4987-8c91-d1320771434a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1460.241678] env[61964]: DEBUG nova.network.neutron [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Refreshing network info cache for port 4504922d-705f-44f5-9c79-abf1a5fe512e {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1460.479021] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688578, 'name': CreateVM_Task, 'duration_secs': 0.309582} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.479021] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1460.479021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1460.479021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1460.479021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1460.479283] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc0cf294-e9af-4727-87a3-7db0c6f7fbc7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.484737] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Waiting for the task: (returnval){ [ 1460.484737] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529a61b4-766e-f144-a9b7-3655091ad614" [ 1460.484737] env[61964]: _type = 
"Task" [ 1460.484737] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.494851] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529a61b4-766e-f144-a9b7-3655091ad614, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.595846] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Successfully updated port: e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1460.612362] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "refresh_cache-ebcc4aaa-3506-4b4f-80da-532c3f7bb891" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1460.612546] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquired lock "refresh_cache-ebcc4aaa-3506-4b4f-80da-532c3f7bb891" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1460.612716] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1460.786653] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1460.996410] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1460.997460] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1460.997460] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1461.617580] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Updating instance_info_cache with network_info: [{"id": "e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b", "address": "fa:16:3e:a9:e8:27", "network": {"id": "50f4e800-eea2-47cf-8994-c9b2fbac8c2b", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-855565702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2718eda15f54774b19418cb62149ba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8a2ffe9-ea", "ovs_interfaceid": "e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.633837] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Releasing lock "refresh_cache-ebcc4aaa-3506-4b4f-80da-532c3f7bb891" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1461.634520] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 
tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Instance network_info: |[{"id": "e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b", "address": "fa:16:3e:a9:e8:27", "network": {"id": "50f4e800-eea2-47cf-8994-c9b2fbac8c2b", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-855565702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2718eda15f54774b19418cb62149ba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8a2ffe9-ea", "ovs_interfaceid": "e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1461.635668] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:e8:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1461.649234] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Creating folder: Project (c2718eda15f54774b19418cb62149ba8). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1461.649841] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e20d0854-7750-48c2-8635-e42433bd03bd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.662617] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Created folder: Project (c2718eda15f54774b19418cb62149ba8) in parent group-v351942. [ 1461.663902] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Creating folder: Instances. Parent ref: group-v351968. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1461.665132] env[61964]: DEBUG nova.network.neutron [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Updated VIF entry in instance network info cache for port 4504922d-705f-44f5-9c79-abf1a5fe512e. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1461.665682] env[61964]: DEBUG nova.network.neutron [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Updating instance_info_cache with network_info: [{"id": "4504922d-705f-44f5-9c79-abf1a5fe512e", "address": "fa:16:3e:65:5e:e7", "network": {"id": "521e43ba-8a46-4ad9-b85a-c9d67226275d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2002236805-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fff335fd73e441f0ac0f6e36f339abe2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1c8fdf9-970c-4ae0-b6d9-f1015196b552", "external-id": "nsx-vlan-transportzone-993", "segmentation_id": 993, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4504922d-70", "ovs_interfaceid": "4504922d-705f-44f5-9c79-abf1a5fe512e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.667366] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00c82abc-2026-48ce-b6f2-1660137990d1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.677089] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Created folder: Instances in parent group-v351968. [ 1461.677327] env[61964]: DEBUG oslo.service.loopingcall [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.677539] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1461.677707] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d540078-d480-4916-8785-fd44655f1766 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.694973] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Releasing lock "refresh_cache-b8993737-d2ef-4987-8c91-d1320771434a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1461.695239] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Received event network-changed-2306e5d1-233c-4c7b-9ead-f37ef4edab6c {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1461.695511] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Refreshing instance network info cache due to event network-changed-2306e5d1-233c-4c7b-9ead-f37ef4edab6c. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1461.695709] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Acquiring lock "refresh_cache-31e6ec86-cf5a-438c-ad8f-aad775fbb376" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1461.695858] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Acquired lock "refresh_cache-31e6ec86-cf5a-438c-ad8f-aad775fbb376" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1461.696027] env[61964]: DEBUG nova.network.neutron [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Refreshing network info cache for port 2306e5d1-233c-4c7b-9ead-f37ef4edab6c {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1461.702078] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1461.702078] env[61964]: value = "task-1688581" [ 1461.702078] env[61964]: _type = "Task" [ 1461.702078] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.717873] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688581, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.214601] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688581, 'name': CreateVM_Task, 'duration_secs': 0.389461} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.215540] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1462.219110] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1462.219110] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1462.219110] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1462.219110] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-696ca0a8-0b17-436a-b903-73f661c87d45 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.222793] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Waiting for the task: (returnval){ [ 1462.222793] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520522cd-4aef-82b2-9102-e295fa9e8b2f" [ 1462.222793] env[61964]: _type = "Task" [ 1462.222793] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.232081] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520522cd-4aef-82b2-9102-e295fa9e8b2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.376248] env[61964]: DEBUG nova.compute.manager [req-064fa677-c886-4cf0-8f1c-8d48cae50a6a req-bafeeee8-5a92-414b-bf67-df33ea7fa49f service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Received event network-vif-plugged-e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1462.377831] env[61964]: DEBUG oslo_concurrency.lockutils [req-064fa677-c886-4cf0-8f1c-8d48cae50a6a req-bafeeee8-5a92-414b-bf67-df33ea7fa49f service nova] Acquiring lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1462.379202] env[61964]: DEBUG oslo_concurrency.lockutils [req-064fa677-c886-4cf0-8f1c-8d48cae50a6a req-bafeeee8-5a92-414b-bf67-df33ea7fa49f service nova] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1462.379202] env[61964]: DEBUG oslo_concurrency.lockutils [req-064fa677-c886-4cf0-8f1c-8d48cae50a6a req-bafeeee8-5a92-414b-bf67-df33ea7fa49f service nova] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1462.379310] env[61964]: DEBUG nova.compute.manager [req-064fa677-c886-4cf0-8f1c-8d48cae50a6a req-bafeeee8-5a92-414b-bf67-df33ea7fa49f service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] No waiting events found dispatching network-vif-plugged-e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1462.382031] env[61964]: WARNING nova.compute.manager [req-064fa677-c886-4cf0-8f1c-8d48cae50a6a req-bafeeee8-5a92-414b-bf67-df33ea7fa49f service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Received unexpected event network-vif-plugged-e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b for instance with vm_state building and task_state spawning. 
[ 1462.739605] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1462.739891] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.740128] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1463.636164] env[61964]: DEBUG nova.network.neutron [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Updated VIF entry in instance network info cache for port 2306e5d1-233c-4c7b-9ead-f37ef4edab6c. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1463.636515] env[61964]: DEBUG nova.network.neutron [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Updating instance_info_cache with network_info: [{"id": "2306e5d1-233c-4c7b-9ead-f37ef4edab6c", "address": "fa:16:3e:f6:94:b8", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2306e5d1-23", "ovs_interfaceid": "2306e5d1-233c-4c7b-9ead-f37ef4edab6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.650906] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Releasing lock "refresh_cache-31e6ec86-cf5a-438c-ad8f-aad775fbb376" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1463.653603] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 
req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Received event network-vif-plugged-10994424-d6d5-4ec7-832e-9ab89826ed6b {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1463.653603] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Acquiring lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1463.653603] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1463.653603] env[61964]: DEBUG oslo_concurrency.lockutils [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1463.653896] env[61964]: DEBUG nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] No waiting events found dispatching network-vif-plugged-10994424-d6d5-4ec7-832e-9ab89826ed6b {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1463.653896] env[61964]: WARNING nova.compute.manager [req-9b9ae0cc-8af8-4fb8-9a67-b61e952b5ac5 req-dfa4ced8-fa3b-428a-aa6b-ef17b6b0982f service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Received unexpected event network-vif-plugged-10994424-d6d5-4ec7-832e-9ab89826ed6b for instance with vm_state building and task_state spawning. [ 1465.079336] env[61964]: DEBUG nova.compute.manager [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Received event network-changed-10994424-d6d5-4ec7-832e-9ab89826ed6b {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1465.079682] env[61964]: DEBUG nova.compute.manager [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Refreshing instance network info cache due to event network-changed-10994424-d6d5-4ec7-832e-9ab89826ed6b. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1465.079804] env[61964]: DEBUG oslo_concurrency.lockutils [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] Acquiring lock "refresh_cache-2325430a-6b1a-41d9-bc13-fd7d98e07e9e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1465.080068] env[61964]: DEBUG oslo_concurrency.lockutils [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] Acquired lock "refresh_cache-2325430a-6b1a-41d9-bc13-fd7d98e07e9e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1465.080163] env[61964]: DEBUG nova.network.neutron [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Refreshing network info cache for port 10994424-d6d5-4ec7-832e-9ab89826ed6b {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1465.250673] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1465.250927] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1466.195558] env[61964]: DEBUG nova.network.neutron [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Updated VIF entry in instance network info cache for port 10994424-d6d5-4ec7-832e-9ab89826ed6b. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1466.195558] env[61964]: DEBUG nova.network.neutron [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Updating instance_info_cache with network_info: [{"id": "10994424-d6d5-4ec7-832e-9ab89826ed6b", "address": "fa:16:3e:d7:b8:1f", "network": {"id": "d9fd8686-66d8-4479-98c2-cdd9e6b436ed", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2137987265-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6379657dbb24d02b267548c34f5b73d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10994424-d6", "ovs_interfaceid": "10994424-d6d5-4ec7-832e-9ab89826ed6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.208610] env[61964]: DEBUG oslo_concurrency.lockutils [req-a454e13e-23b2-49b6-9564-0a5c126b9fe7 req-7b389865-7415-4853-8f9f-da5cadb56176 service nova] Releasing lock "refresh_cache-2325430a-6b1a-41d9-bc13-fd7d98e07e9e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1467.894213] env[61964]: DEBUG nova.compute.manager [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Received event network-changed-e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1467.894464] env[61964]: DEBUG nova.compute.manager [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Refreshing instance network info cache due to event network-changed-e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1467.894719] env[61964]: DEBUG oslo_concurrency.lockutils [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] Acquiring lock "refresh_cache-ebcc4aaa-3506-4b4f-80da-532c3f7bb891" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1467.894872] env[61964]: DEBUG oslo_concurrency.lockutils [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] Acquired lock "refresh_cache-ebcc4aaa-3506-4b4f-80da-532c3f7bb891" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1467.895049] env[61964]: DEBUG nova.network.neutron [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Refreshing network info cache for port e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1468.606746] env[61964]: DEBUG nova.network.neutron [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Updated VIF entry in instance network info cache for port e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1468.607168] env[61964]: DEBUG nova.network.neutron [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Updating instance_info_cache with network_info: [{"id": "e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b", "address": "fa:16:3e:a9:e8:27", "network": {"id": "50f4e800-eea2-47cf-8994-c9b2fbac8c2b", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-855565702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2718eda15f54774b19418cb62149ba8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8a2ffe9-ea", "ovs_interfaceid": "e8a2ffe9-ea5c-496e-84f8-fff1563e2b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.617758] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1468.620509] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1468.655553] env[61964]: DEBUG oslo_concurrency.lockutils [req-46f61621-e781-4a27-8f97-49c910604150 req-5269b991-0329-4e14-bd35-521f6c38b6bd service nova] Releasing lock "refresh_cache-ebcc4aaa-3506-4b4f-80da-532c3f7bb891" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1470.264049] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "ac955d73-c844-4b98-b791-7d7c749c6954" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1470.264331] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "ac955d73-c844-4b98-b791-7d7c749c6954" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1472.019740] env[61964]: DEBUG oslo_concurrency.lockutils [None req-60a69151-7460-4deb-8c2e-fec49fd82361 tempest-ImagesOneServerTestJSON-773828785 tempest-ImagesOneServerTestJSON-773828785-project-member] Acquiring lock "871ae746-5bc6-4979-80be-9f67d0db5301" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1472.020062] env[61964]: DEBUG oslo_concurrency.lockutils [None req-60a69151-7460-4deb-8c2e-fec49fd82361 tempest-ImagesOneServerTestJSON-773828785 tempest-ImagesOneServerTestJSON-773828785-project-member] Lock "871ae746-5bc6-4979-80be-9f67d0db5301" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1473.221043] env[61964]: DEBUG oslo_concurrency.lockutils [None req-518d4398-cde3-4c61-8fb8-8d1ef2d67f68 tempest-InstanceActionsTestJSON-2141208745 tempest-InstanceActionsTestJSON-2141208745-project-member] Acquiring lock "8ea5b2d1-bfc8-4531-b5fe-60926e4950d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1473.221650] env[61964]: DEBUG oslo_concurrency.lockutils [None req-518d4398-cde3-4c61-8fb8-8d1ef2d67f68 tempest-InstanceActionsTestJSON-2141208745 tempest-InstanceActionsTestJSON-2141208745-project-member] Lock "8ea5b2d1-bfc8-4531-b5fe-60926e4950d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1475.052125] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83f090aa-998b-4eb8-9301-957352111ba0 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1475.052125] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83f090aa-998b-4eb8-9301-957352111ba0 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1477.194235] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1477.194235] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1479.221460] env[61964]: WARNING oslo_vmware.rw_handles [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1479.221460] env[61964]: ERROR oslo_vmware.rw_handles [ 1479.222416] env[61964]: DEBUG nova.virt.vmwareapi.images [None 
req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1479.223470] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1479.223597] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Copying Virtual Disk [datastore1] vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/40b3746c-9035-4328-973c-b0c0ce689b78/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1479.223884] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f890643-0730-4c7e-aabe-958a2330747f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.232279] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Waiting for the task: (returnval){ [ 1479.232279] env[61964]: value = "task-1688582" [ 1479.232279] env[61964]: _type = "Task" [ 1479.232279] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.243179] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': task-1688582, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.449222] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5641a49c-d0f5-4836-bd1a-9385e825a26b tempest-AttachInterfacesUnderV243Test-2126647480 tempest-AttachInterfacesUnderV243Test-2126647480-project-member] Acquiring lock "5bafa3d0-3619-4d7e-b625-9b389394738f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1479.449293] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5641a49c-d0f5-4836-bd1a-9385e825a26b tempest-AttachInterfacesUnderV243Test-2126647480 tempest-AttachInterfacesUnderV243Test-2126647480-project-member] Lock "5bafa3d0-3619-4d7e-b625-9b389394738f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1479.744451] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': task-1688582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.243932] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': task-1688582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.504130] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3d15b390-6556-4a81-9fb9-45c708e8da48 tempest-ServersV294TestFqdnHostnames-1706479718 tempest-ServersV294TestFqdnHostnames-1706479718-project-member] Acquiring lock "40aa82a7-09f6-4e99-bd8b-32c500aac259" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1480.504382] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3d15b390-6556-4a81-9fb9-45c708e8da48 tempest-ServersV294TestFqdnHostnames-1706479718 tempest-ServersV294TestFqdnHostnames-1706479718-project-member] Lock "40aa82a7-09f6-4e99-bd8b-32c500aac259" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1480.745405] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': task-1688582, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.141870] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a68af9f7-b3e4-41ad-8dbb-a8a4d2d14b8c tempest-TenantUsagesTestJSON-1307804085 tempest-TenantUsagesTestJSON-1307804085-project-member] Acquiring lock "daea8bd9-5d0a-40e3-9353-28785d5fd7ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1481.142096] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a68af9f7-b3e4-41ad-8dbb-a8a4d2d14b8c tempest-TenantUsagesTestJSON-1307804085 tempest-TenantUsagesTestJSON-1307804085-project-member] Lock "daea8bd9-5d0a-40e3-9353-28785d5fd7ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1481.142284] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8679ef6b-aeb2-49fa-9e54-c76e1d9651f5 tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] Acquiring lock "24dfefdd-8357-4f82-aaf0-9f6e6907291f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1481.142520] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8679ef6b-aeb2-49fa-9e54-c76e1d9651f5 tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] Lock "24dfefdd-8357-4f82-aaf0-9f6e6907291f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1481.245794] env[61964]: DEBUG oslo_vmware.exceptions [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1481.246084] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1481.249436] env[61964]: ERROR nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1481.249436] env[61964]: Faults: ['InvalidArgument'] [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Traceback (most recent call last): [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] yield resources [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self.driver.spawn(context, instance, image_meta, [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self._fetch_image_if_missing(context, vi) [ 1481.249436] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] image_cache(vi, tmp_image_ds_loc) [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] vm_util.copy_virtual_disk( [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] session._wait_for_task(vmdk_copy_task) [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] return self.wait_for_task(task_ref) [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] return evt.wait() [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] result = hub.switch() [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1481.249798] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] return self.greenlet.switch() [ 1481.250238] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1481.250238] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self.f(*self.args, **self.kw) [ 1481.250238] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1481.250238] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] raise exceptions.translate_fault(task_info.error) [ 1481.250238] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1481.250238] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Faults: ['InvalidArgument'] [ 1481.250238] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] [ 1481.250238] env[61964]: INFO nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Terminating instance [ 1481.251376] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1481.251577] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.251814] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-211de5e2-0c58-4ef3-ae65-6b6aabc89434 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.254426] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquiring lock "refresh_cache-408cfd06-df36-46a4-9a6c-86dc91339712" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1481.254584] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquired lock "refresh_cache-408cfd06-df36-46a4-9a6c-86dc91339712" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1481.254793] env[61964]: DEBUG nova.network.neutron [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1481.262045] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.262233] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1481.263471] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-486c73c0-76fc-42f1-b79a-e696e134b8bf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.271224] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Waiting for the task: (returnval){ [ 1481.271224] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b4992c-abf5-f765-215f-da91fbbc0748" [ 1481.271224] env[61964]: _type = "Task" [ 1481.271224] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.279823] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b4992c-abf5-f765-215f-da91fbbc0748, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.286812] env[61964]: DEBUG nova.network.neutron [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1481.426119] env[61964]: DEBUG nova.network.neutron [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.438109] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Releasing lock "refresh_cache-408cfd06-df36-46a4-9a6c-86dc91339712" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1481.438550] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1481.438738] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1481.441714] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9025ba-0e01-4041-a33e-4b9047e8c192 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.453281] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1481.453281] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8180f48a-1cc2-4474-960b-8d5b8ff22e9a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.488270] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1481.488654] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1481.488837] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Deleting the datastore file [datastore1] 408cfd06-df36-46a4-9a6c-86dc91339712 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} 
[ 1481.488942] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-618c0033-122d-410b-8222-8d1121e5de97 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.495914] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Waiting for the task: (returnval){ [ 1481.495914] env[61964]: value = "task-1688584" [ 1481.495914] env[61964]: _type = "Task" [ 1481.495914] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.502850] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': task-1688584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.785736] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1481.785736] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Creating directory with path [datastore1] vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.785736] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcfa58fb-44af-4a26-919e-1188091e23ee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.804294] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Created directory with path [datastore1] vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.804345] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Fetch image to [datastore1] vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1481.805166] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) 
_fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1481.805415] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e4a4a8-0d53-425d-9083-99ccadc33cef {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.813790] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844a2818-870e-40ca-9ace-2166ce7d7ecf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.826187] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9595bf-bd20-40fd-b60f-3913292226b3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.862022] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbee6f4d-6cb7-49e5-b3ec-4cab73d04c6a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.873034] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9b5abb6d-e969-4576-8894-b385ccfd7b3d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.957699] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1482.004749] env[61964]: DEBUG oslo_vmware.api [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Task: {'id': task-1688584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035777} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.005038] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1482.005224] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1482.005395] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1482.006027] env[61964]: INFO nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Took 0.57 seconds to destroy the instance on the hypervisor. [ 1482.006315] env[61964]: DEBUG oslo.service.loopingcall [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1482.006806] env[61964]: DEBUG nova.compute.manager [-] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1482.010267] env[61964]: DEBUG nova.compute.claims [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1482.010437] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1482.010652] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1482.023452] env[61964]: DEBUG oslo_vmware.rw_handles [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1482.094107] env[61964]: DEBUG oslo_vmware.rw_handles [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1482.094303] env[61964]: DEBUG oslo_vmware.rw_handles [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1482.471899] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f0314b-babc-48ae-b13e-943e052f0662 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.480287] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278299ed-99fd-4455-a6a8-838e915d5f36 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.512011] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1739a3-bf2c-4d4c-acc0-8622314378cd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.519592] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccec1c5f-8ce5-41cd-8c05-e98e2c498fa6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.532753] env[61964]: DEBUG nova.compute.provider_tree [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.555709] env[61964]: DEBUG nova.scheduler.client.report [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1482.580318] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.569s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1482.581375] env[61964]: ERROR nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1482.581375] env[61964]: Faults: ['InvalidArgument'] [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Traceback (most recent call last): [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1482.581375] env[61964]: ERROR nova.compute.manager 
[instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self.driver.spawn(context, instance, image_meta, [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self._fetch_image_if_missing(context, vi) [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] image_cache(vi, tmp_image_ds_loc) [ 1482.581375] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] vm_util.copy_virtual_disk( [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] session._wait_for_task(vmdk_copy_task) [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] return self.wait_for_task(task_ref) [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] return evt.wait() [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] result = hub.switch() [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] return self.greenlet.switch() [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1482.581741] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] self.f(*self.args, **self.kw) [ 1482.582119] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1482.582119] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] raise exceptions.translate_fault(task_info.error) [ 1482.582119] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1482.582119] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Faults: ['InvalidArgument'] [ 1482.582119] env[61964]: ERROR nova.compute.manager [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] [ 1482.582917] env[61964]: DEBUG nova.compute.utils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1482.587918] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Build of instance 408cfd06-df36-46a4-9a6c-86dc91339712 was re-scheduled: A specified parameter was not correct: fileType [ 1482.587918] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1482.588872] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1482.589282] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquiring lock "refresh_cache-408cfd06-df36-46a4-9a6c-86dc91339712" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1482.589538] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Acquired lock "refresh_cache-408cfd06-df36-46a4-9a6c-86dc91339712" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1482.589914] env[61964]: DEBUG nova.network.neutron [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1482.718540] env[61964]: DEBUG nova.network.neutron [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1482.825571] env[61964]: DEBUG nova.network.neutron [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.839736] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Releasing lock "refresh_cache-408cfd06-df36-46a4-9a6c-86dc91339712" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1482.839976] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1482.840177] env[61964]: DEBUG nova.compute.manager [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] [instance: 408cfd06-df36-46a4-9a6c-86dc91339712] Skipping network deallocation for instance since networking was not requested. {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1482.971676] env[61964]: INFO nova.scheduler.client.report [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Deleted allocations for instance 408cfd06-df36-46a4-9a6c-86dc91339712 [ 1483.007090] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4b94ea60-975d-467e-89d5-91fdf336f531 tempest-ServersAdmin275Test-1593003819 tempest-ServersAdmin275Test-1593003819-project-member] Lock "408cfd06-df36-46a4-9a6c-86dc91339712" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.153s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1483.049530] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1483.114569] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1483.114569] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1483.116097] env[61964]: INFO nova.compute.claims [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.526360] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddee117d-754e-4f89-96ba-22c10239c970 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.536368] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fc40c9-d1d0-4105-9ee6-0b8ba17e062c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.569599] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e635a03-6242-421f-9754-bb3b259ba0b1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.577575] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b57ca2d-9272-4c72-912b-c2095737395a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.594469] env[61964]: DEBUG nova.compute.provider_tree [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.608136] env[61964]: DEBUG nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1483.631322] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c 
tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.516s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1483.631581] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1483.686133] env[61964]: DEBUG nova.compute.utils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.687775] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1483.687775] env[61964]: DEBUG nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1483.702273] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1483.826941] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1483.844555] env[61964]: DEBUG nova.policy [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46f96afd6f6749859606fb5ff1b20bc9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'adec86fd331649de9cf354d21ae7d839', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1483.876404] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1483.876942] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1483.877275] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.878356] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1483.878356] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.878356] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1483.878356] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c 
tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1483.878356] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1483.878723] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1483.878995] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1483.881454] env[61964]: DEBUG nova.virt.hardware [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.881454] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1e50ae-dc72-4275-b1d3-198c1a4e7a73 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.892199] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d61706-ba62-4f8c-bc0b-5925ce6c59d7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.710616] env[61964]: DEBUG nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Successfully created port: 9d795162-352a-4ffb-89c1-a37a9d7d3a94 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1485.519705] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4d99a370-109b-4227-a2b5-4d1516b8703c tempest-ServerDiagnosticsV248Test-172020440 tempest-ServerDiagnosticsV248Test-172020440-project-member] Acquiring lock "5413b152-2cd7-41c4-bf18-bd3d51971a6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1485.520140] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4d99a370-109b-4227-a2b5-4d1516b8703c tempest-ServerDiagnosticsV248Test-172020440 tempest-ServerDiagnosticsV248Test-172020440-project-member] Lock "5413b152-2cd7-41c4-bf18-bd3d51971a6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1486.037935] env[61964]: DEBUG 
nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Successfully updated port: 9d795162-352a-4ffb-89c1-a37a9d7d3a94 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1486.056994] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "refresh_cache-8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1486.057939] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquired lock "refresh_cache-8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1486.058171] env[61964]: DEBUG nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1486.115912] env[61964]: DEBUG nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1486.402243] env[61964]: DEBUG nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Updating instance_info_cache with network_info: [{"id": "9d795162-352a-4ffb-89c1-a37a9d7d3a94", "address": "fa:16:3e:35:0d:c8", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d795162-35", "ovs_interfaceid": "9d795162-352a-4ffb-89c1-a37a9d7d3a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.424933] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Releasing 
lock "refresh_cache-8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1486.425296] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Instance network_info: |[{"id": "9d795162-352a-4ffb-89c1-a37a9d7d3a94", "address": "fa:16:3e:35:0d:c8", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d795162-35", "ovs_interfaceid": "9d795162-352a-4ffb-89c1-a37a9d7d3a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1486.426054] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:0d:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d795162-352a-4ffb-89c1-a37a9d7d3a94', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1486.437122] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Creating folder: Project (adec86fd331649de9cf354d21ae7d839). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1486.437122] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2d136f9-7708-49ed-887a-3830b00ba1af {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.445831] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Created folder: Project (adec86fd331649de9cf354d21ae7d839) in parent group-v351942. [ 1486.446025] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Creating folder: Instances. Parent ref: group-v351971. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1486.446249] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58078cb3-fff2-4939-9f67-3616eb971c63 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.455418] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Created folder: Instances in parent group-v351971. [ 1486.455676] env[61964]: DEBUG oslo.service.loopingcall [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.455930] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1486.456147] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b98dc66d-d9db-4b1d-a063-ff9d2404ba4b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.487970] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1486.487970] env[61964]: value = "task-1688587" [ 1486.487970] env[61964]: _type = "Task" [ 1486.487970] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.498836] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688587, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.821497] env[61964]: DEBUG nova.compute.manager [req-66e0fa1e-e136-4caf-b0d7-12b7dfc40206 req-42361903-cd66-46b5-a835-03d735d83283 service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Received event network-vif-plugged-9d795162-352a-4ffb-89c1-a37a9d7d3a94 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1486.823166] env[61964]: DEBUG oslo_concurrency.lockutils [req-66e0fa1e-e136-4caf-b0d7-12b7dfc40206 req-42361903-cd66-46b5-a835-03d735d83283 service nova] Acquiring lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1486.823280] env[61964]: DEBUG oslo_concurrency.lockutils [req-66e0fa1e-e136-4caf-b0d7-12b7dfc40206 req-42361903-cd66-46b5-a835-03d735d83283 service nova] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1486.823531] env[61964]: DEBUG oslo_concurrency.lockutils [req-66e0fa1e-e136-4caf-b0d7-12b7dfc40206 req-42361903-cd66-46b5-a835-03d735d83283 service nova] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1486.823777] env[61964]: DEBUG nova.compute.manager [req-66e0fa1e-e136-4caf-b0d7-12b7dfc40206 req-42361903-cd66-46b5-a835-03d735d83283 service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] No waiting events found dispatching network-vif-plugged-9d795162-352a-4ffb-89c1-a37a9d7d3a94 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1486.824265] env[61964]: WARNING nova.compute.manager [req-66e0fa1e-e136-4caf-b0d7-12b7dfc40206 req-42361903-cd66-46b5-a835-03d735d83283 service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Received unexpected event network-vif-plugged-9d795162-352a-4ffb-89c1-a37a9d7d3a94 for instance with vm_state building and task_state spawning. [ 1487.004805] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688587, 'name': CreateVM_Task, 'duration_secs': 0.412464} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.004805] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1487.004805] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1487.004805] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1487.004805] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1487.005063] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cb36da5-9c20-483d-b331-112f25f53e87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.011192] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for the task: (returnval){ [ 1487.011192] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525ed130-d51f-ac1f-121c-98549328850c" [ 1487.011192] env[61964]: _type = "Task" [ 1487.011192] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.017859] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525ed130-d51f-ac1f-121c-98549328850c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.521302] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1487.521572] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1487.522140] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1489.604159] env[61964]: DEBUG nova.compute.manager [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Received event network-changed-9d795162-352a-4ffb-89c1-a37a9d7d3a94 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1489.604159] env[61964]: DEBUG nova.compute.manager [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Refreshing instance network info cache due to event network-changed-9d795162-352a-4ffb-89c1-a37a9d7d3a94. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1489.604159] env[61964]: DEBUG oslo_concurrency.lockutils [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] Acquiring lock "refresh_cache-8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1489.604159] env[61964]: DEBUG oslo_concurrency.lockutils [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] Acquired lock "refresh_cache-8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1489.604159] env[61964]: DEBUG nova.network.neutron [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Refreshing network info cache for port 9d795162-352a-4ffb-89c1-a37a9d7d3a94 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1490.491828] env[61964]: DEBUG nova.network.neutron [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Updated VIF entry in instance network info cache for port 9d795162-352a-4ffb-89c1-a37a9d7d3a94. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1490.491828] env[61964]: DEBUG nova.network.neutron [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Updating instance_info_cache with network_info: [{"id": "9d795162-352a-4ffb-89c1-a37a9d7d3a94", "address": "fa:16:3e:35:0d:c8", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d795162-35", "ovs_interfaceid": "9d795162-352a-4ffb-89c1-a37a9d7d3a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.505232] env[61964]: DEBUG oslo_concurrency.lockutils [req-ee03dd8e-7554-4bc2-8b36-bac7d7ce5017 req-cf0d948d-1a8e-4010-9ed2-6123fadbca3d service nova] Releasing lock "refresh_cache-8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1492.428996] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e578cae-a84d-4f4e-a058-70b3877021f7 tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] Acquiring lock "571e9cdb-28ac-43de-a0ed-45458f12d68d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1492.429314] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e578cae-a84d-4f4e-a058-70b3877021f7 tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] Lock "571e9cdb-28ac-43de-a0ed-45458f12d68d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1494.514218] env[61964]: DEBUG oslo_concurrency.lockutils [None req-287e749f-5edc-4bfe-8634-9afd70738b1e tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] Acquiring lock "ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1494.514518] env[61964]: DEBUG oslo_concurrency.lockutils [None req-287e749f-5edc-4bfe-8634-9afd70738b1e tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] Lock "ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1496.539349] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8d635c1a-b3e5-4a82-a8e6-23f14c77c5b8 tempest-ServerActionsTestOtherB-1760892883 tempest-ServerActionsTestOtherB-1760892883-project-member] Acquiring lock "0ad4f562-805b-48e0-99ae-53934da0af4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1496.539349] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8d635c1a-b3e5-4a82-a8e6-23f14c77c5b8 tempest-ServerActionsTestOtherB-1760892883 tempest-ServerActionsTestOtherB-1760892883-project-member] Lock "0ad4f562-805b-48e0-99ae-53934da0af4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1502.168533] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386019] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386019] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1502.386019] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1502.410912] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.413495] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.413647] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.414087] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.414087] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.414087] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.414196] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.414274] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.414391] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.415027] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1502.415027] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1505.748530] env[61964]: DEBUG oslo_concurrency.lockutils [None req-02930160-1860-41b5-a23a-cb1bdd129452 tempest-ServersTestManualDisk-2090240948 tempest-ServersTestManualDisk-2090240948-project-member] Acquiring lock "068818f5-6df6-4de7-8158-c4f8bf11bb9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1505.748530] env[61964]: DEBUG oslo_concurrency.lockutils [None req-02930160-1860-41b5-a23a-cb1bdd129452 tempest-ServersTestManualDisk-2090240948 tempest-ServersTestManualDisk-2090240948-project-member] Lock "068818f5-6df6-4de7-8158-c4f8bf11bb9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1506.383557] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.609117] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dc31d29a-10d7-4ecf-8295-b9ba09b46025 tempest-ServerAddressesNegativeTestJSON-963656082 tempest-ServerAddressesNegativeTestJSON-963656082-project-member] Acquiring lock "9fa91cf5-e051-4136-bdd2-66beb12a8cdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1506.609297] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dc31d29a-10d7-4ecf-8295-b9ba09b46025 tempest-ServerAddressesNegativeTestJSON-963656082 tempest-ServerAddressesNegativeTestJSON-963656082-project-member] Lock "9fa91cf5-e051-4136-bdd2-66beb12a8cdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1506.844039] env[61964]: DEBUG oslo_concurrency.lockutils [None req-be4d9c64-bdd4-484f-b03f-7b50e972412b tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] Acquiring lock "be0bc30b-c63d-4b33-9668-bbcd7d889f79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1506.844039] env[61964]: DEBUG oslo_concurrency.lockutils [None req-be4d9c64-bdd4-484f-b03f-7b50e972412b tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] Lock "be0bc30b-c63d-4b33-9668-bbcd7d889f79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1507.378742] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1507.383454] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1507.383648] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1507.877676] env[61964]: DEBUG oslo_concurrency.lockutils [None req-833f3f57-3b5e-4684-be64-5dcfb0142f41 tempest-ServerAddressesTestJSON-998688652 tempest-ServerAddressesTestJSON-998688652-project-member] Acquiring lock "1fffa4d3-fe6b-47c9-ad52-db62c0c88a03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1507.877676] env[61964]: DEBUG oslo_concurrency.lockutils [None req-833f3f57-3b5e-4684-be64-5dcfb0142f41 tempest-ServerAddressesTestJSON-998688652 tempest-ServerAddressesTestJSON-998688652-project-member] Lock "1fffa4d3-fe6b-47c9-ad52-db62c0c88a03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1508.189470] env[61964]: DEBUG oslo_concurrency.lockutils [None req-42d4c102-91f9-4666-a2a5-72894a859014 tempest-ServerRescueTestJSONUnderV235-1639241418 tempest-ServerRescueTestJSONUnderV235-1639241418-project-member] Acquiring lock "02a9b7be-f0a7-42f0-ac71-860e753f9408" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1508.189731] env[61964]: DEBUG oslo_concurrency.lockutils [None req-42d4c102-91f9-4666-a2a5-72894a859014 tempest-ServerRescueTestJSONUnderV235-1639241418 tempest-ServerRescueTestJSONUnderV235-1639241418-project-member] Lock "02a9b7be-f0a7-42f0-ac71-860e753f9408" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1508.383424] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.383677] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.383825] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1509.383585] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.395427] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1509.395563] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1509.395716] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1509.395875] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1509.397090] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4094b0f-1f03-4ae2-a35e-3f9182cec478 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.405977] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74800569-896d-4c87-af42-d9385cfddc35 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.419955] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01b5c7f-5a3a-4637-8314-b194024627d6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.426701] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ad904b-495c-4293-85a9-919d7be410ff {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.458621] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181367MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1509.458772] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1509.458969] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1509.545349] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f99b3d5c-9f51-4815-9507-6522e57e715f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.545474] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 36056842-1c0b-4f4c-a512-e250fc657620 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.545653] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 41046d8c-861e-4bb4-8f7f-ae7c4d494964 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.545818] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f2c66aa6-4406-4cfa-8a13-c382eebed6bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.546013] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c5dd385e-2447-4539-aed1-81d957076f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.546229] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8993737-d2ef-4987-8c91-d1320771434a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.546397] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.546557] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.546713] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.546890] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1509.574705] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.607982] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.620646] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.633274] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 871ae746-5bc6-4979-80be-9f67d0db5301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.644719] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8ea5b2d1-bfc8-4531-b5fe-60926e4950d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.655876] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.666546] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.683192] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5bafa3d0-3619-4d7e-b625-9b389394738f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.695589] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 40aa82a7-09f6-4e99-bd8b-32c500aac259 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.709352] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 24dfefdd-8357-4f82-aaf0-9f6e6907291f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.728316] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance daea8bd9-5d0a-40e3-9353-28785d5fd7ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.739686] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5413b152-2cd7-41c4-bf18-bd3d51971a6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.752047] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 571e9cdb-28ac-43de-a0ed-45458f12d68d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.763999] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.777007] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ad4f562-805b-48e0-99ae-53934da0af4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.796731] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 068818f5-6df6-4de7-8158-c4f8bf11bb9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.811964] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9fa91cf5-e051-4136-bdd2-66beb12a8cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.828874] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance be0bc30b-c63d-4b33-9668-bbcd7d889f79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.843487] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1fffa4d3-fe6b-47c9-ad52-db62c0c88a03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.856499] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 02a9b7be-f0a7-42f0-ac71-860e753f9408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1509.856847] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1509.857542] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1510.311292] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc924ad6-b8c1-4eee-9a27-7e2a0d6e7e97 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.320742] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aaa60e8-8443-4b4a-852c-89ba1775f164 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.352221] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2754e11a-3d50-48ce-be04-0851c8234e2c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.359451] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72e9951-ed15-43b2-bbcc-a12cb89bcd6b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.372289] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1510.389894] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1510.407950] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1510.408176] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.949s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1511.404019] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1517.322019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0208860-679b-4290-be2b-e4c8368321b9 tempest-ImagesOneServerNegativeTestJSON-637618724 tempest-ImagesOneServerNegativeTestJSON-637618724-project-member] Acquiring lock "ece2c65c-60f9-4a4e-b135-f79d7adb188e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1517.322413] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0208860-679b-4290-be2b-e4c8368321b9 tempest-ImagesOneServerNegativeTestJSON-637618724 tempest-ImagesOneServerNegativeTestJSON-637618724-project-member] Lock "ece2c65c-60f9-4a4e-b135-f79d7adb188e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1527.472196] env[61964]: WARNING oslo_vmware.rw_handles [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1527.472196] env[61964]: ERROR oslo_vmware.rw_handles [ 1527.472761] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Downloaded image file data 
d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1527.474128] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1527.474344] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Copying Virtual Disk [datastore1] vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/e5315bed-af01-46e8-aa74-25e11e26799e/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1527.474618] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c5846b1-4a7f-440d-8c77-8e9dcc1cc30a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.483038] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Waiting for the task: (returnval){ [ 1527.483038] env[61964]: value = "task-1688588" [ 1527.483038] env[61964]: _type = "Task" [ 1527.483038] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.490603] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Task: {'id': task-1688588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.992682] env[61964]: DEBUG oslo_vmware.exceptions [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1527.992996] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1527.993666] env[61964]: ERROR nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1527.993666] env[61964]: Faults: ['InvalidArgument'] [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Traceback (most recent call last): [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] yield resources [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self.driver.spawn(context, instance, image_meta, [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self._fetch_image_if_missing(context, vi) [ 1527.993666] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] image_cache(vi, tmp_image_ds_loc) [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] vm_util.copy_virtual_disk( [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] session._wait_for_task(vmdk_copy_task) [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] return self.wait_for_task(task_ref) [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] return evt.wait() [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] result = hub.switch() [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1527.994000] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] return self.greenlet.switch() [ 1527.994305] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1527.994305] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self.f(*self.args, **self.kw) [ 1527.994305] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1527.994305] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] raise exceptions.translate_fault(task_info.error) [ 1527.994305] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1527.994305] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Faults: ['InvalidArgument'] [ 1527.994305] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] [ 1527.994305] env[61964]: INFO nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Terminating instance [ 1527.995566] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1527.995765] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1527.996013] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-66b1dccd-225b-4d09-8899-08524c17c5b1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.999141] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1527.999400] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1528.000163] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4395d5cc-ff20-4c69-97a7-91447cc85cea {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.004975] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1528.004975] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1528.006048] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b998c4f0-68c2-4c01-b3eb-b08e95aa2d77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.012160] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1528.012160] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91f5d371-ee5b-4f49-9ba5-8f75b986e413 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.012813] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for the task: (returnval){ [ 1528.012813] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a8526f-780f-ee6a-87fa-0acee0241adc" [ 1528.012813] env[61964]: _type = "Task" [ 1528.012813] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.020304] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a8526f-780f-ee6a-87fa-0acee0241adc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.105328] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1528.105554] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1528.105730] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Deleting the datastore file [datastore1] f99b3d5c-9f51-4815-9507-6522e57e715f {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1528.105992] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31df9d4f-dc5f-4a14-9e3b-308fbfb35569 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.112269] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Waiting for the task: (returnval){ [ 1528.112269] env[61964]: value = "task-1688590" [ 1528.112269] env[61964]: _type = "Task" [ 1528.112269] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.119722] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Task: {'id': task-1688590, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.524564] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1528.524564] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Creating directory with path [datastore1] vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1528.524564] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50bfb5ac-b1b9-4033-95b3-f5f1ea18a503 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.535841] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Created directory with path [datastore1] vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1528.536052] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Fetch image to [datastore1] vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1528.536255] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1528.536997] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f75e99-e81e-481e-9c25-edc883362809 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.544043] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba0bb8a-5d02-4fd0-bbd7-9129e5aac81d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.553493] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc26423f-7dc2-474a-b0de-d05de0310665 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.583554] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2ef682a0-a0c6-4301-b11b-34e1bc58c24d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.589539] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-17f7b8fa-291f-440b-9e10-2916f35b428c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.621286] env[61964]: DEBUG oslo_vmware.api [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Task: {'id': task-1688590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07448} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.621522] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1528.621700] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1528.621873] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1528.622430] env[61964]: INFO nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Took 0.62 seconds to destroy the instance on the hypervisor. 
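The CopyVirtualDisk_Task failure that triggered the destroy above follows the poll-and-translate pattern visible in the traceback: the driver submits a vCenter task, polls it until the server reports success or error, and converts a reported fault (here InvalidArgument on fileType) into an exception that the compute manager handles by tearing down the partial VM and rescheduling the build. Below is a minimal, self-contained sketch of that loop; TaskInfo, VimFault and the poll_task_state callable are hypothetical stand-ins for illustration, not the oslo.vmware API.

# Minimal sketch of the poll-and-translate pattern seen in the traceback above.
# TaskInfo, VimFault and poll_task_state are hypothetical stand-ins, not oslo.vmware.
import time
from dataclasses import dataclass


class VimFault(Exception):
    """Raised when the backend reports a task error (e.g. InvalidArgument)."""


@dataclass
class TaskInfo:
    state: str               # 'running', 'success' or 'error'
    progress: int = 0
    error: str | None = None


def wait_for_task(poll_task_state, task_id, interval=0.5):
    """Poll a task until it finishes, mirroring the wait_for_task/_poll_task calls in the log."""
    while True:
        info = poll_task_state(task_id)
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Equivalent to "raise exceptions.translate_fault(task_info.error)" in the traceback.
            raise VimFault(info.error)
        time.sleep(interval)


if __name__ == '__main__':
    # Simulate the failing copy: first poll reports 0% progress, the second reports the fault.
    states = iter([TaskInfo('running', 0),
                   TaskInfo('error', error="A specified parameter was not correct: fileType")])
    try:
        wait_for_task(lambda task_id: next(states), 'task-1688588', interval=0.01)
    except VimFault as exc:
        # The compute manager reacts the same way: destroy the partial VM,
        # abort the resource claim and reschedule the build.
        print(f"task failed, rescheduling build: {exc}")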
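The claim and inventory bookkeeping that follows can be cross-checked against the periodic update_available_resource pass earlier in this section: ten instances are reported as actively managed, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and the inventory reserves 512 MB of host memory, which together reproduce the logged final resource view (used_ram=1792MB, used_disk=10GB, used_vcpus=10). A short arithmetic check using only values taken from the log:

# Sketch: reproduce the "Final resource view" totals from the per-instance
# allocations logged by the resource tracker (all values copied from the log above).
RESERVED_HOST_MEMORY_MB = 512        # 'reserved' in the MEMORY_MB inventory
ACTIVE_INSTANCES = 10                # instances reported as "actively managed"
PER_INSTANCE = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}

used_ram_mb = RESERVED_HOST_MEMORY_MB + ACTIVE_INSTANCES * PER_INSTANCE['MEMORY_MB']
used_disk_gb = ACTIVE_INSTANCES * PER_INSTANCE['DISK_GB']
used_vcpus = ACTIVE_INSTANCES * PER_INSTANCE['VCPU']

# Matches "used_ram=1792MB used_disk=10GB ... used_vcpus=10" in the final resource view.
assert (used_ram_mb, used_disk_gb, used_vcpus) == (1792, 10, 10)
print(used_ram_mb, used_disk_gb, used_vcpus)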
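The repeated "Inventory has not changed" payload also bounds how much the scheduler may consume from this provider. As a sketch of placement's usual capacity rule, (total - reserved) * allocation_ratio, applied to the logged inventory values (an illustration, not Nova code):

# Sketch: capacity implied by the logged inventory under the rule
# capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g}")
# VCPU: capacity 192       -> 48 physical cores oversubscribed 4x
# MEMORY_MB: capacity 196078
# DISK_GB: capacity 400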
[ 1528.624315] env[61964]: DEBUG nova.compute.claims [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1528.624510] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1528.624767] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1528.675232] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1528.735383] env[61964]: DEBUG oslo_vmware.rw_handles [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1528.794270] env[61964]: DEBUG oslo_vmware.rw_handles [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1528.794466] env[61964]: DEBUG oslo_vmware.rw_handles [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1529.099583] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c512b5ca-b601-4d67-90a6-58c773949823 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.108467] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1355c22-5fa9-4e56-b54c-1f217c2371bb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.137450] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e024bd1-faa6-422c-9896-40fcec1a9586 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.144984] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cc01cf-e66e-44a9-a40a-509e9de700ce {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.159786] env[61964]: DEBUG nova.compute.provider_tree [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1529.169150] env[61964]: DEBUG nova.scheduler.client.report [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1529.188762] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.564s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1529.189035] env[61964]: ERROR nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1529.189035] env[61964]: Faults: ['InvalidArgument'] [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Traceback (most recent call last): [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self.driver.spawn(context, instance, image_meta, [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self._fetch_image_if_missing(context, vi) [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] image_cache(vi, tmp_image_ds_loc) [ 1529.189035] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] vm_util.copy_virtual_disk( [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] session._wait_for_task(vmdk_copy_task) [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] return self.wait_for_task(task_ref) [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] return evt.wait() [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] result = hub.switch() [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] return self.greenlet.switch() [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1529.189518] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] self.f(*self.args, **self.kw) [ 1529.190022] env[61964]: ERROR nova.compute.manager [instance: 
f99b3d5c-9f51-4815-9507-6522e57e715f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1529.190022] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] raise exceptions.translate_fault(task_info.error) [ 1529.190022] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1529.190022] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Faults: ['InvalidArgument'] [ 1529.190022] env[61964]: ERROR nova.compute.manager [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] [ 1529.190022] env[61964]: DEBUG nova.compute.utils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1529.191204] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Build of instance f99b3d5c-9f51-4815-9507-6522e57e715f was re-scheduled: A specified parameter was not correct: fileType [ 1529.191204] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1529.191570] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1529.191735] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1529.191888] env[61964]: DEBUG nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1529.192056] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1529.726444] env[61964]: DEBUG nova.network.neutron [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.739288] env[61964]: INFO nova.compute.manager [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] [instance: f99b3d5c-9f51-4815-9507-6522e57e715f] Took 0.54 seconds to deallocate network for instance. [ 1529.839803] env[61964]: INFO nova.scheduler.client.report [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Deleted allocations for instance f99b3d5c-9f51-4815-9507-6522e57e715f [ 1529.865203] env[61964]: DEBUG oslo_concurrency.lockutils [None req-336c0491-ba34-404d-abaf-25d1bc89f02f tempest-ServersAdminNegativeTestJSON-825345952 tempest-ServersAdminNegativeTestJSON-825345952-project-member] Lock "f99b3d5c-9f51-4815-9507-6522e57e715f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.066s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1529.875632] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1529.927627] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1529.927627] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1529.930755] env[61964]: INFO nova.compute.claims [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1530.357021] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b9e088-11c3-4366-8396-a19d3da65720 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.363088] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0850d7-1d40-4f69-b708-f08b58a170c6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.399025] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31549c56-0aa0-409e-91b7-8ecc7851fe72 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.406810] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f3e4e7-fc2d-4c4d-bab6-7443c7e529e0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.420143] env[61964]: DEBUG nova.compute.provider_tree [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.429976] env[61964]: DEBUG nova.scheduler.client.report [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1530.442399] env[61964]: DEBUG oslo_concurrency.lockutils 
[None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.515s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1530.443179] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1530.484690] env[61964]: DEBUG nova.compute.utils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1530.486118] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1530.486329] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1530.499157] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1530.566791] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1530.591701] env[61964]: DEBUG nova.policy [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eba0b13c97dc427ab0da4fdbd71ae85d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c129e8a559b4264927ff0d2510aa439', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1530.596070] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1530.596370] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1530.596542] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1530.596725] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1530.596868] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1530.597017] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1530.597220] env[61964]: 
DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1530.597380] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1530.597572] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1530.597744] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1530.597914] env[61964]: DEBUG nova.virt.hardware [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1530.598740] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cab3492-46f1-4ae3-b4d3-771270bc1fcd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.607066] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f616ef-6ab1-4190-bb5b-ce5862cdcbf5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.108612] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Successfully created port: 1be55fbf-50f0-4092-99ea-5b717e8d3e4c {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1531.950778] env[61964]: DEBUG nova.compute.manager [req-2e5d32b2-049a-49fc-bf03-90329c5c315a req-a81725b2-eae4-429b-80ae-4e586397bc83 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Received event network-vif-plugged-1be55fbf-50f0-4092-99ea-5b717e8d3e4c {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1531.950955] env[61964]: DEBUG oslo_concurrency.lockutils [req-2e5d32b2-049a-49fc-bf03-90329c5c315a req-a81725b2-eae4-429b-80ae-4e586397bc83 service nova] Acquiring lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1531.951253] env[61964]: DEBUG oslo_concurrency.lockutils [req-2e5d32b2-049a-49fc-bf03-90329c5c315a 
req-a81725b2-eae4-429b-80ae-4e586397bc83 service nova] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1531.951369] env[61964]: DEBUG oslo_concurrency.lockutils [req-2e5d32b2-049a-49fc-bf03-90329c5c315a req-a81725b2-eae4-429b-80ae-4e586397bc83 service nova] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1531.951534] env[61964]: DEBUG nova.compute.manager [req-2e5d32b2-049a-49fc-bf03-90329c5c315a req-a81725b2-eae4-429b-80ae-4e586397bc83 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] No waiting events found dispatching network-vif-plugged-1be55fbf-50f0-4092-99ea-5b717e8d3e4c {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1531.951699] env[61964]: WARNING nova.compute.manager [req-2e5d32b2-049a-49fc-bf03-90329c5c315a req-a81725b2-eae4-429b-80ae-4e586397bc83 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Received unexpected event network-vif-plugged-1be55fbf-50f0-4092-99ea-5b717e8d3e4c for instance with vm_state building and task_state spawning. [ 1531.952972] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Successfully updated port: 1be55fbf-50f0-4092-99ea-5b717e8d3e4c {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1531.973022] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "refresh_cache-b8231080-7a09-4e00-ab2b-e9ff4abf352a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1531.973022] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired lock "refresh_cache-b8231080-7a09-4e00-ab2b-e9ff4abf352a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1531.973022] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1532.021223] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1532.518309] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Updating instance_info_cache with network_info: [{"id": "1be55fbf-50f0-4092-99ea-5b717e8d3e4c", "address": "fa:16:3e:d3:be:fe", "network": {"id": "3852b29f-f2c2-4604-b229-1d5952e819eb", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1568272893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c129e8a559b4264927ff0d2510aa439", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1be55fbf-50", "ovs_interfaceid": "1be55fbf-50f0-4092-99ea-5b717e8d3e4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.533344] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Releasing lock "refresh_cache-b8231080-7a09-4e00-ab2b-e9ff4abf352a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1532.533640] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Instance network_info: |[{"id": "1be55fbf-50f0-4092-99ea-5b717e8d3e4c", "address": "fa:16:3e:d3:be:fe", "network": {"id": "3852b29f-f2c2-4604-b229-1d5952e819eb", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1568272893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c129e8a559b4264927ff0d2510aa439", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1be55fbf-50", "ovs_interfaceid": "1be55fbf-50f0-4092-99ea-5b717e8d3e4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1532.534030] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:be:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a92a4ffe-7939-4697-bf98-5b22e2c7feda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1be55fbf-50f0-4092-99ea-5b717e8d3e4c', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.542315] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating folder: Project (3c129e8a559b4264927ff0d2510aa439). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1532.543216] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f64f039-c6fd-4783-abf2-cb3a40e365d7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.554853] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Created folder: Project (3c129e8a559b4264927ff0d2510aa439) in parent group-v351942. [ 1532.555051] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating folder: Instances. Parent ref: group-v351974. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1532.555293] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfb1d5d7-6980-4d56-b778-fbe183bcc498 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.564169] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Created folder: Instances in parent group-v351974. [ 1532.564392] env[61964]: DEBUG oslo.service.loopingcall [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.564566] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1532.564752] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ad87d5f-0ece-4cf5-a9cb-c2ef14d069c5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.588023] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.588023] env[61964]: value = "task-1688593" [ 1532.588023] env[61964]: _type = "Task" [ 1532.588023] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.595308] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688593, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.097339] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688593, 'name': CreateVM_Task, 'duration_secs': 0.295484} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.097507] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1533.098201] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1533.098366] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1533.098697] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1533.098955] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fffc8fee-4720-4256-b63d-c411438dccdd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.104858] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 1533.104858] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526ede55-8f83-f67f-8d95-a7e26b1f14f1" [ 1533.104858] env[61964]: _type = "Task" [ 1533.104858] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.114302] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526ede55-8f83-f67f-8d95-a7e26b1f14f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.615874] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1533.616162] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1533.616373] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1534.099190] env[61964]: DEBUG nova.compute.manager [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Received event network-changed-1be55fbf-50f0-4092-99ea-5b717e8d3e4c {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1534.102022] env[61964]: DEBUG nova.compute.manager [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Refreshing instance network info cache due to event network-changed-1be55fbf-50f0-4092-99ea-5b717e8d3e4c. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1534.102022] env[61964]: DEBUG oslo_concurrency.lockutils [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] Acquiring lock "refresh_cache-b8231080-7a09-4e00-ab2b-e9ff4abf352a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1534.102022] env[61964]: DEBUG oslo_concurrency.lockutils [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] Acquired lock "refresh_cache-b8231080-7a09-4e00-ab2b-e9ff4abf352a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1534.102022] env[61964]: DEBUG nova.network.neutron [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Refreshing network info cache for port 1be55fbf-50f0-4092-99ea-5b717e8d3e4c {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1534.491786] env[61964]: DEBUG nova.network.neutron [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Updated VIF entry in instance network info cache for port 1be55fbf-50f0-4092-99ea-5b717e8d3e4c. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1534.492157] env[61964]: DEBUG nova.network.neutron [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Updating instance_info_cache with network_info: [{"id": "1be55fbf-50f0-4092-99ea-5b717e8d3e4c", "address": "fa:16:3e:d3:be:fe", "network": {"id": "3852b29f-f2c2-4604-b229-1d5952e819eb", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1568272893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c129e8a559b4264927ff0d2510aa439", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1be55fbf-50", "ovs_interfaceid": "1be55fbf-50f0-4092-99ea-5b717e8d3e4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.507585] env[61964]: DEBUG oslo_concurrency.lockutils [req-30731307-9e54-4455-a9c6-b5b35e2877cc req-1697be22-469b-4032-a3a4-20a952ccfb97 service nova] Releasing lock "refresh_cache-b8231080-7a09-4e00-ab2b-e9ff4abf352a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1536.155019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 
tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1536.155019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1562.384814] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1562.385088] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1562.385152] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1562.405385] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.405590] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.405590] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.405750] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.405871] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.407100] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.407100] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.407100] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.407100] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.407100] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1562.407327] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1562.407327] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.384676] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.384972] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.380856] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.383864] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.383864] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.383864] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1569.384614] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1571.384613] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1571.396577] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1571.396791] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1571.396963] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1571.397123] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1571.398318] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a1cc85-f64e-4854-ba29-703cdcfef3d9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.407127] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cc9fa1-ca64-4891-a268-f4c068309446 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.420621] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13330b4-9cb3-4843-851b-35ab03502200 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.427192] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4935c3-6718-4fec-9056-1af7344c9607 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.457502] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181329MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1571.457662] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1571.457854] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1571.531795] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 36056842-1c0b-4f4c-a512-e250fc657620 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.531966] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 41046d8c-861e-4bb4-8f7f-ae7c4d494964 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532109] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f2c66aa6-4406-4cfa-8a13-c382eebed6bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532234] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c5dd385e-2447-4539-aed1-81d957076f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532352] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8993737-d2ef-4987-8c91-d1320771434a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532470] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532584] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532698] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532807] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.532918] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1571.543699] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.554455] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.564469] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 871ae746-5bc6-4979-80be-9f67d0db5301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.575528] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8ea5b2d1-bfc8-4531-b5fe-60926e4950d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.585294] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.594814] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.604459] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5bafa3d0-3619-4d7e-b625-9b389394738f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.614247] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 40aa82a7-09f6-4e99-bd8b-32c500aac259 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.625080] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 24dfefdd-8357-4f82-aaf0-9f6e6907291f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.634763] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance daea8bd9-5d0a-40e3-9353-28785d5fd7ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.643843] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5413b152-2cd7-41c4-bf18-bd3d51971a6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.654433] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 571e9cdb-28ac-43de-a0ed-45458f12d68d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.664407] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.674636] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ad4f562-805b-48e0-99ae-53934da0af4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.685217] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 068818f5-6df6-4de7-8158-c4f8bf11bb9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.695211] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9fa91cf5-e051-4136-bdd2-66beb12a8cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.705129] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance be0bc30b-c63d-4b33-9668-bbcd7d889f79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.715482] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1fffa4d3-fe6b-47c9-ad52-db62c0c88a03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.725125] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 02a9b7be-f0a7-42f0-ac71-860e753f9408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.737960] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ece2c65c-60f9-4a4e-b135-f79d7adb188e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.746743] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1571.747027] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1571.747485] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1572.139477] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835424f7-d318-485b-940b-fd15283cffdf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.147928] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00142c6b-404a-453b-8e1b-116dcbf01eeb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.176734] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64f92dc-5f96-4028-9618-0655a896c464 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.184113] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e86157-6fa3-4eb7-b41d-3decc05038d0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.196910] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: 
c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1572.207374] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1572.223177] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1572.223369] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.765s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1576.971025] env[61964]: WARNING oslo_vmware.rw_handles [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1576.971025] env[61964]: ERROR oslo_vmware.rw_handles [ 1576.971025] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1576.971778] env[61964]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1576.972153] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Copying Virtual Disk [datastore1] vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/a8c2aa13-121f-4d05-88fd-0e4f0ccb8e72/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1576.972626] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da5903fe-aff3-43e4-a7c1-799097c18e25 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.982587] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for the task: (returnval){ [ 1576.982587] env[61964]: value = "task-1688594" [ 1576.982587] env[61964]: _type = "Task" [ 1576.982587] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.990407] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.493974] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.994605] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.495161] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.996437] env[61964]: DEBUG oslo_vmware.exceptions [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1578.996823] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1578.997278] env[61964]: ERROR nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1578.997278] env[61964]: Faults: ['InvalidArgument'] [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Traceback (most recent call last): [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] yield resources [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self.driver.spawn(context, instance, image_meta, [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self._fetch_image_if_missing(context, vi) [ 1578.997278] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] image_cache(vi, tmp_image_ds_loc) [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] vm_util.copy_virtual_disk( [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] session._wait_for_task(vmdk_copy_task) [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] return self.wait_for_task(task_ref) [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] return evt.wait() [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] result = hub.switch() [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1578.997677] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] return self.greenlet.switch() [ 1578.998017] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1578.998017] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self.f(*self.args, **self.kw) [ 1578.998017] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1578.998017] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] raise exceptions.translate_fault(task_info.error) [ 1578.998017] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1578.998017] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Faults: ['InvalidArgument'] [ 1578.998017] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] [ 1578.998017] env[61964]: INFO nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Terminating instance [ 1578.999207] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1578.999377] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1578.999617] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-beafa2bb-ad3a-4b55-b5ed-5c31ec1f99d0 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.002999] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1579.003198] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1579.003966] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfa7e34-11a7-4ea4-b490-8de24e24096d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.010778] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1579.010993] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5a4e89d-6984-4383-b7d4-e56093b1d8b5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.013796] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.013977] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1579.014953] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71ede500-b9e3-44c6-a2ec-86c4bdf21e7f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.020088] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Waiting for the task: (returnval){ [ 1579.020088] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528379d8-ba9d-5d02-47b9-5e3711f72974" [ 1579.020088] env[61964]: _type = "Task" [ 1579.020088] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.028664] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528379d8-ba9d-5d02-47b9-5e3711f72974, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.096057] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1579.096057] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1579.096057] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Deleting the datastore file [datastore1] 36056842-1c0b-4f4c-a512-e250fc657620 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1579.096288] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4950618a-523d-4a47-8fb6-3bd504bbdccf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.102851] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for the task: (returnval){ [ 1579.102851] env[61964]: value = "task-1688596" [ 1579.102851] env[61964]: _type = "Task" [ 1579.102851] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.110563] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688596, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.530560] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1579.530825] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Creating directory with path [datastore1] vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1579.531070] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c386b7b6-80a2-455e-aaa8-f9d3926db138 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.542368] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Created directory with path [datastore1] vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.542471] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Fetch image to [datastore1] vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1579.542625] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1579.543436] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7a362b-9226-4ce8-81aa-d235418d4f25 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.551052] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4bab82-ec32-4ee2-8004-767d005ab26c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.559154] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8babb92d-b72c-4424-8726-683b60d3e88a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.589862] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d5e08d-22da-4401-84b8-c919b344d0f8 {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.596264] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0dffaf2d-f66d-44ce-9e7d-33e0ab794a3e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.611208] env[61964]: DEBUG oslo_vmware.api [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077945} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.611208] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1579.611208] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1579.611421] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1579.611600] env[61964]: INFO nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Took 0.61 seconds to destroy the instance on the hypervisor. 
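[Editor's note] The repeated "Task: {'id': task-1688594, 'name': CopyVirtualDisk_Task} progress is 0%." lines above, and the DeleteDatastoreFile_Task entry that finally reports duration_secs, come from oslo.vmware polling a vCenter task roughly every 0.5 seconds until it reaches a terminal state, at which point the task error (here the InvalidArgument 'fileType' fault) is translated into an exception. The sketch below is a minimal illustration of that polling pattern only; it does not reproduce oslo.vmware internals, and poll_task_state and TaskFailed are hypothetical stand-ins introduced for the example.

    import time

    POLL_INTERVAL = 0.5  # seconds, matching the cadence visible in the log above

    class TaskFailed(Exception):
        """Raised when the remote task ends in an error state (hypothetical)."""

    def wait_for_task(poll_task_state, task_id):
        """Poll a task until it succeeds or fails.

        poll_task_state(task_id) is assumed to return a (state, progress, error)
        tuple, e.g. ('running', 0, None) or ('error', 0, 'InvalidArgument').
        """
        while True:
            state, progress, error = poll_task_state(task_id)
            if state == 'success':
                return
            if state == 'error':
                # In the log above this is where the CopyVirtualDisk_Task failure
                # surfaces as a fault ('InvalidArgument' on fileType).
                raise TaskFailed(error)
            time.sleep(POLL_INTERVAL)

The real implementation lives in oslo_vmware.api (wait_for_task / _poll_task, as the file:line trailers in the entries above indicate); the sketch only shows why a failing copy produces several identical "progress is 0%" lines before the fault is raised.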
[ 1579.613796] env[61964]: DEBUG nova.compute.claims [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1579.614020] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1579.614253] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1579.681716] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1579.742220] env[61964]: DEBUG oslo_vmware.rw_handles [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1579.803159] env[61964]: DEBUG oslo_vmware.rw_handles [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1579.804148] env[61964]: DEBUG oslo_vmware.rw_handles [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1580.053826] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9797bb-c0df-4822-8e7b-5574e929caac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.061301] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4606643-d4c1-458a-9b31-ad9d52307b44 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.090769] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f573647c-d5d9-4fde-98bc-b29ea1907056 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.097872] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0bffd4-0f4a-42a1-985b-538428cf29b0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.111468] env[61964]: DEBUG nova.compute.provider_tree [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.122228] env[61964]: DEBUG nova.scheduler.client.report [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1580.137145] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.523s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1580.137674] env[61964]: ERROR nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1580.137674] env[61964]: Faults: ['InvalidArgument'] [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Traceback (most recent call last): [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 
1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self.driver.spawn(context, instance, image_meta, [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self._fetch_image_if_missing(context, vi) [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] image_cache(vi, tmp_image_ds_loc) [ 1580.137674] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] vm_util.copy_virtual_disk( [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] session._wait_for_task(vmdk_copy_task) [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] return self.wait_for_task(task_ref) [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] return evt.wait() [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] result = hub.switch() [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] return self.greenlet.switch() [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1580.138016] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] self.f(*self.args, **self.kw) [ 1580.138364] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1580.138364] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] raise exceptions.translate_fault(task_info.error) [ 1580.138364] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1580.138364] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Faults: ['InvalidArgument'] [ 1580.138364] env[61964]: ERROR nova.compute.manager [instance: 36056842-1c0b-4f4c-a512-e250fc657620] [ 1580.138364] env[61964]: DEBUG nova.compute.utils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1580.139690] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Build of instance 36056842-1c0b-4f4c-a512-e250fc657620 was re-scheduled: A specified parameter was not correct: fileType [ 1580.139690] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1580.140066] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1580.140240] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1580.140393] env[61964]: DEBUG nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1580.140551] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1580.490529] env[61964]: DEBUG nova.network.neutron [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.502122] env[61964]: INFO nova.compute.manager [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 36056842-1c0b-4f4c-a512-e250fc657620] Took 0.36 seconds to deallocate network for instance. [ 1580.596547] env[61964]: INFO nova.scheduler.client.report [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Deleted allocations for instance 36056842-1c0b-4f4c-a512-e250fc657620 [ 1580.620176] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ba353d7c-abda-40d6-8c72-f42b9696fbdb tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "36056842-1c0b-4f4c-a512-e250fc657620" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.009s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1580.631394] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1580.681263] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1580.681515] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1580.683060] env[61964]: INFO nova.compute.claims [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1581.081630] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88023830-b111-4308-9e0c-b7e87ef75583 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.089792] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fec0e4-14c3-43de-8c62-aa277283795d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.119299] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ced5c5-f3c5-4312-b3f1-a5b09ce484b5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.126332] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2fdd70-d8ac-4949-a2ea-fdc416c823a1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.139085] env[61964]: DEBUG nova.compute.provider_tree [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1581.147653] env[61964]: DEBUG nova.scheduler.client.report [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1581.164662] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.483s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1581.165189] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1581.211285] env[61964]: DEBUG nova.compute.utils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1581.212670] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1581.212787] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1581.222665] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1581.285111] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1581.290051] env[61964]: DEBUG nova.policy [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd48d83c9bcdd4e2fbce0ab280c1179fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '073251d4523440dc83bf9b7ea2244faf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1581.309641] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1581.309879] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1581.310031] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1581.310215] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1581.310358] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1581.310500] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1581.310710] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1581.310873] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1581.311044] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1581.311208] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1581.311376] env[61964]: DEBUG nova.virt.hardware [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1581.312237] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e644ae-95aa-493f-8351-24052a83f8d8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.320334] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281d8055-88fc-45cb-aa2d-e55e0e753dfa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.734013] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Successfully created port: bbb62e2a-bf32-4be9-bacd-84666d8f0d07 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1582.553464] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Successfully updated port: bbb62e2a-bf32-4be9-bacd-84666d8f0d07 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1582.564499] env[61964]: DEBUG oslo_concurrency.lockutils [None 
req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "refresh_cache-34e97fca-5664-418e-bb12-8c16ddb3b0c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1582.564643] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquired lock "refresh_cache-34e97fca-5664-418e-bb12-8c16ddb3b0c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1582.564778] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1582.791213] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1583.019041] env[61964]: DEBUG nova.compute.manager [req-22e1b609-856f-43ae-b0b7-8c3d13e322cc req-936ee0cf-2607-4a4c-8ed1-fa96851a1802 service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Received event network-vif-plugged-bbb62e2a-bf32-4be9-bacd-84666d8f0d07 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1583.019264] env[61964]: DEBUG oslo_concurrency.lockutils [req-22e1b609-856f-43ae-b0b7-8c3d13e322cc req-936ee0cf-2607-4a4c-8ed1-fa96851a1802 service nova] Acquiring lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1583.019469] env[61964]: DEBUG oslo_concurrency.lockutils [req-22e1b609-856f-43ae-b0b7-8c3d13e322cc req-936ee0cf-2607-4a4c-8ed1-fa96851a1802 service nova] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1583.019634] env[61964]: DEBUG oslo_concurrency.lockutils [req-22e1b609-856f-43ae-b0b7-8c3d13e322cc req-936ee0cf-2607-4a4c-8ed1-fa96851a1802 service nova] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1583.019794] env[61964]: DEBUG nova.compute.manager [req-22e1b609-856f-43ae-b0b7-8c3d13e322cc req-936ee0cf-2607-4a4c-8ed1-fa96851a1802 service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] No waiting events found dispatching network-vif-plugged-bbb62e2a-bf32-4be9-bacd-84666d8f0d07 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1583.019951] env[61964]: WARNING 
nova.compute.manager [req-22e1b609-856f-43ae-b0b7-8c3d13e322cc req-936ee0cf-2607-4a4c-8ed1-fa96851a1802 service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Received unexpected event network-vif-plugged-bbb62e2a-bf32-4be9-bacd-84666d8f0d07 for instance with vm_state building and task_state spawning. [ 1583.051658] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "2b69def4-b892-4d76-bfd2-841014f75098" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1583.051984] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "2b69def4-b892-4d76-bfd2-841014f75098" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1583.053234] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Updating instance_info_cache with network_info: [{"id": "bbb62e2a-bf32-4be9-bacd-84666d8f0d07", "address": "fa:16:3e:18:c8:ef", "network": {"id": "0c11d391-fbe7-444f-bd0e-6fcdfd6940c1", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-290265996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "073251d4523440dc83bf9b7ea2244faf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbb62e2a-bf", "ovs_interfaceid": "bbb62e2a-bf32-4be9-bacd-84666d8f0d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.069274] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Releasing lock "refresh_cache-34e97fca-5664-418e-bb12-8c16ddb3b0c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1583.069641] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 
34e97fca-5664-418e-bb12-8c16ddb3b0c9] Instance network_info: |[{"id": "bbb62e2a-bf32-4be9-bacd-84666d8f0d07", "address": "fa:16:3e:18:c8:ef", "network": {"id": "0c11d391-fbe7-444f-bd0e-6fcdfd6940c1", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-290265996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "073251d4523440dc83bf9b7ea2244faf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbb62e2a-bf", "ovs_interfaceid": "bbb62e2a-bf32-4be9-bacd-84666d8f0d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1583.070130] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:c8:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbb62e2a-bf32-4be9-bacd-84666d8f0d07', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1583.082699] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Creating folder: Project (073251d4523440dc83bf9b7ea2244faf). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1583.083440] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e607aa61-1168-4d18-9aa0-ead8eee81ed2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.096634] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Created folder: Project (073251d4523440dc83bf9b7ea2244faf) in parent group-v351942. [ 1583.096820] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Creating folder: Instances. Parent ref: group-v351977. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1583.097057] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6cceb82-0042-4640-bc20-534839fb9fb3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.105430] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Created folder: Instances in parent group-v351977. [ 1583.105681] env[61964]: DEBUG oslo.service.loopingcall [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1583.105860] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1583.106061] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcfe077d-75bd-4880-a536-b6d1d252c255 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.124989] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1583.124989] env[61964]: value = "task-1688599" [ 1583.124989] env[61964]: _type = "Task" [ 1583.124989] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.132212] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688599, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.635250] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688599, 'name': CreateVM_Task, 'duration_secs': 0.307726} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.635556] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1583.636136] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1583.636303] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1583.636613] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1583.636869] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f78846e-864e-4aa5-b3aa-5f950d66b849 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.641381] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Waiting for the task: (returnval){ [ 1583.641381] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52179e61-592a-6591-dcd0-4209ab4cb5eb" [ 1583.641381] env[61964]: _type = "Task" [ 1583.641381] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.648948] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52179e61-592a-6591-dcd0-4209ab4cb5eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.153102] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1584.153353] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1584.153604] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1585.044577] env[61964]: DEBUG nova.compute.manager [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Received event network-changed-bbb62e2a-bf32-4be9-bacd-84666d8f0d07 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1585.044845] env[61964]: DEBUG nova.compute.manager [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Refreshing instance network info cache due to event network-changed-bbb62e2a-bf32-4be9-bacd-84666d8f0d07. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1585.044972] env[61964]: DEBUG oslo_concurrency.lockutils [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] Acquiring lock "refresh_cache-34e97fca-5664-418e-bb12-8c16ddb3b0c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1585.045128] env[61964]: DEBUG oslo_concurrency.lockutils [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] Acquired lock "refresh_cache-34e97fca-5664-418e-bb12-8c16ddb3b0c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1585.045286] env[61964]: DEBUG nova.network.neutron [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Refreshing network info cache for port bbb62e2a-bf32-4be9-bacd-84666d8f0d07 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1585.360515] env[61964]: DEBUG nova.network.neutron [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Updated VIF entry in instance network info cache for port bbb62e2a-bf32-4be9-bacd-84666d8f0d07. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1585.360858] env[61964]: DEBUG nova.network.neutron [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Updating instance_info_cache with network_info: [{"id": "bbb62e2a-bf32-4be9-bacd-84666d8f0d07", "address": "fa:16:3e:18:c8:ef", "network": {"id": "0c11d391-fbe7-444f-bd0e-6fcdfd6940c1", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-290265996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "073251d4523440dc83bf9b7ea2244faf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbb62e2a-bf", "ovs_interfaceid": "bbb62e2a-bf32-4be9-bacd-84666d8f0d07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.370351] env[61964]: DEBUG oslo_concurrency.lockutils [req-b73ff303-d8d6-40c2-a3ea-79555b9b8f3a req-219137d3-c268-4e23-9031-47df90a2bd7b service nova] Releasing lock "refresh_cache-34e97fca-5664-418e-bb12-8c16ddb3b0c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1623.223255] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1623.223635] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1623.223635] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1623.244055] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.244258] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.244374] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.244531] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.244664] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.244787] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.244911] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.245042] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.245166] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.245285] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1623.245404] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1624.383920] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1625.582271] env[61964]: WARNING oslo_vmware.rw_handles [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1625.582271] env[61964]: ERROR oslo_vmware.rw_handles [ 1625.583239] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1625.584527] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1625.584800] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Copying Virtual Disk [datastore1] vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/49c54d7b-60c9-4643-8fd1-55918537655f/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1625.585102] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0011696-6476-4752-9a60-53dae4ec2076 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.593305] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Waiting for the task: (returnval){ [ 1625.593305] env[61964]: value = "task-1688600" [ 1625.593305] env[61964]: _type = "Task" [ 1625.593305] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.601569] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Task: {'id': task-1688600, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.103692] env[61964]: DEBUG oslo_vmware.exceptions [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1626.104142] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1626.104762] env[61964]: ERROR nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1626.104762] env[61964]: Faults: ['InvalidArgument'] [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Traceback (most recent call last): [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] yield resources [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self.driver.spawn(context, instance, image_meta, [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self._fetch_image_if_missing(context, 
vi) [ 1626.104762] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] image_cache(vi, tmp_image_ds_loc) [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] vm_util.copy_virtual_disk( [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] session._wait_for_task(vmdk_copy_task) [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] return self.wait_for_task(task_ref) [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] return evt.wait() [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] result = hub.switch() [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1626.105109] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] return self.greenlet.switch() [ 1626.105414] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1626.105414] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self.f(*self.args, **self.kw) [ 1626.105414] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1626.105414] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] raise exceptions.translate_fault(task_info.error) [ 1626.105414] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1626.105414] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Faults: ['InvalidArgument'] [ 1626.105414] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] [ 1626.105414] env[61964]: INFO nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 
tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Terminating instance [ 1626.106945] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1626.107213] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1626.107496] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0f32786-88da-410f-90fa-dbc2d2d6fcc3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.109931] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1626.110144] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1626.110850] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452eec33-f5e4-4a57-8055-67bbc16bd45d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.117779] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1626.118015] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ede380d-4c56-4a0a-92cf-82319aef6469 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.120457] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1626.120625] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1626.121617] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79a18bc9-56b0-4ffd-8fb1-44d562502d06 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.126598] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Waiting for the task: (returnval){ [ 1626.126598] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52de3c6b-7ffd-2862-8826-5eb97432a959" [ 1626.126598] env[61964]: _type = "Task" [ 1626.126598] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.133621] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52de3c6b-7ffd-2862-8826-5eb97432a959, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.208015] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1626.208248] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1626.208590] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Deleting the datastore file [datastore1] 41046d8c-861e-4bb4-8f7f-ae7c4d494964 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1626.208883] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e837a720-fd00-4921-bbc6-8194e069dd0b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.215122] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Waiting for the task: (returnval){ [ 1626.215122] env[61964]: value = "task-1688602" [ 1626.215122] env[61964]: _type = "Task" [ 1626.215122] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.223169] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Task: {'id': task-1688602, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.638144] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1626.638402] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Creating directory with path [datastore1] vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1626.638623] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b61099e6-913c-4c04-b2ae-4224d0eea795 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.649385] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Created directory with path [datastore1] vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1626.649557] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Fetch image to [datastore1] vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1626.649721] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1626.650424] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f45caa-dc96-469a-9561-601d1d3c4696 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.657106] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3443e91-9ff7-4ee6-99c5-b8f330ca1a5d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.666197] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ea54bc-bfd4-4967-b4bf-3406a761bf77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.696167] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c4cd5d-9258-40e8-9216-b390bed6e65e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.701841] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-726c62e2-36d2-4a1a-8ef8-993511a3d552 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.723379] env[61964]: DEBUG oslo_vmware.api [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Task: {'id': task-1688602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067953} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.724736] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1626.724933] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1626.725196] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1626.725294] env[61964]: INFO nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1626.727025] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1626.728927] env[61964]: DEBUG nova.compute.claims [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1626.729111] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1626.729356] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1626.790947] env[61964]: DEBUG oslo_vmware.rw_handles [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1626.851766] env[61964]: DEBUG oslo_vmware.rw_handles [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1626.851949] env[61964]: DEBUG oslo_vmware.rw_handles [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1627.214340] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9227c409-931b-4859-b583-573d58b25594 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.222593] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ae6787-2854-4834-8c99-0a616be3df68 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.254096] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f7f0d4-0a88-46f7-9d2c-678f3ecc420c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.262270] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d019ce5-e1c1-493e-9e19-9211bfd0a3cc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.277163] env[61964]: DEBUG nova.compute.provider_tree [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.286086] env[61964]: DEBUG nova.scheduler.client.report [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1627.304149] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.575s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.304770] env[61964]: ERROR nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.304770] env[61964]: Faults: ['InvalidArgument'] [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Traceback (most recent call last): [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1627.304770] env[61964]: ERROR 
nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self.driver.spawn(context, instance, image_meta, [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self._fetch_image_if_missing(context, vi) [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] image_cache(vi, tmp_image_ds_loc) [ 1627.304770] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] vm_util.copy_virtual_disk( [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] session._wait_for_task(vmdk_copy_task) [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] return self.wait_for_task(task_ref) [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] return evt.wait() [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] result = hub.switch() [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] return self.greenlet.switch() [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1627.305144] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] self.f(*self.args, **self.kw) [ 1627.305513] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1627.305513] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] raise exceptions.translate_fault(task_info.error) [ 1627.305513] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.305513] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Faults: ['InvalidArgument'] [ 1627.305513] env[61964]: ERROR nova.compute.manager [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] [ 1627.305513] env[61964]: DEBUG nova.compute.utils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1627.308119] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Build of instance 41046d8c-861e-4bb4-8f7f-ae7c4d494964 was re-scheduled: A specified parameter was not correct: fileType [ 1627.308119] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1627.308119] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1627.308119] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1627.308119] env[61964]: DEBUG nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1627.308329] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1627.758787] env[61964]: DEBUG nova.network.neutron [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.773131] env[61964]: INFO nova.compute.manager [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] [instance: 41046d8c-861e-4bb4-8f7f-ae7c4d494964] Took 0.46 seconds to deallocate network for instance. [ 1627.893094] env[61964]: INFO nova.scheduler.client.report [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Deleted allocations for instance 41046d8c-861e-4bb4-8f7f-ae7c4d494964 [ 1627.925031] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fa74f4a4-0093-4e98-9437-a2a7505474f2 tempest-ServerDiagnosticsTest-781628629 tempest-ServerDiagnosticsTest-781628629-project-member] Lock "41046d8c-861e-4bb4-8f7f-ae7c4d494964" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 195.681s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1627.951778] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1628.017391] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1628.018191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1628.019463] env[61964]: INFO nova.compute.claims [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1628.379590] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1628.385836] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1628.386518] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1628.386812] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1628.475212] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dace9654-6f15-4a2c-b06c-b34ef6bef68d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.483840] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86687b0-4dc3-41db-861a-dfe2bbf52e62 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.514759] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c8ee73-4303-487b-9863-66be5a7b6c75 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.522723] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b14ed4b-3a01-4e03-a89b-c66d37479708 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.537356] env[61964]: DEBUG nova.compute.provider_tree [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.546204] env[61964]: DEBUG nova.scheduler.client.report [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1628.566501] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.549s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1628.567082] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1628.604755] env[61964]: DEBUG nova.compute.utils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1628.606271] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1628.606441] env[61964]: DEBUG nova.network.neutron [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1628.634985] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1628.675222] env[61964]: DEBUG nova.policy [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd51a99e513144cbac3e397d7ec1dc45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40991aebb97545db8c04deda187dcdfd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1628.724883] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1628.753607] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1628.753854] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1628.754022] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1628.754220] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1628.754369] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1628.754510] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1628.754711] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1628.754862] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1628.755060] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 
tempest-ImagesTestJSON-1538433138-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1628.755239] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1628.755412] env[61964]: DEBUG nova.virt.hardware [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1628.756304] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f8ebf4-f91e-439d-a657-4edfe27c5023 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.764669] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c49eb66-2061-4a86-aff5-3fa5731afac5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.116796] env[61964]: DEBUG nova.network.neutron [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Successfully created port: 4ddebefb-a058-44f4-b830-d2b0906126a0 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1629.384331] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.384572] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.124481] env[61964]: DEBUG nova.network.neutron [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Successfully updated port: 4ddebefb-a058-44f4-b830-d2b0906126a0 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1630.155158] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "refresh_cache-ac955d73-c844-4b98-b791-7d7c749c6954" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1630.155386] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired lock "refresh_cache-ac955d73-c844-4b98-b791-7d7c749c6954" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1630.156090] env[61964]: DEBUG nova.network.neutron [None 
req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1630.194904] env[61964]: DEBUG nova.compute.manager [req-861f72be-140e-43cd-970e-0655318e2b29 req-ae55ab4e-777d-4a1c-967e-5801d5084fe8 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Received event network-vif-plugged-4ddebefb-a058-44f4-b830-d2b0906126a0 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1630.195215] env[61964]: DEBUG oslo_concurrency.lockutils [req-861f72be-140e-43cd-970e-0655318e2b29 req-ae55ab4e-777d-4a1c-967e-5801d5084fe8 service nova] Acquiring lock "ac955d73-c844-4b98-b791-7d7c749c6954-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1630.195438] env[61964]: DEBUG oslo_concurrency.lockutils [req-861f72be-140e-43cd-970e-0655318e2b29 req-ae55ab4e-777d-4a1c-967e-5801d5084fe8 service nova] Lock "ac955d73-c844-4b98-b791-7d7c749c6954-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1630.195574] env[61964]: DEBUG oslo_concurrency.lockutils [req-861f72be-140e-43cd-970e-0655318e2b29 req-ae55ab4e-777d-4a1c-967e-5801d5084fe8 service nova] Lock "ac955d73-c844-4b98-b791-7d7c749c6954-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1630.195916] env[61964]: DEBUG nova.compute.manager [req-861f72be-140e-43cd-970e-0655318e2b29 req-ae55ab4e-777d-4a1c-967e-5801d5084fe8 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] No waiting events found dispatching network-vif-plugged-4ddebefb-a058-44f4-b830-d2b0906126a0 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1630.196174] env[61964]: WARNING nova.compute.manager [req-861f72be-140e-43cd-970e-0655318e2b29 req-ae55ab4e-777d-4a1c-967e-5801d5084fe8 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Received unexpected event network-vif-plugged-4ddebefb-a058-44f4-b830-d2b0906126a0 for instance with vm_state building and task_state spawning. [ 1630.217295] env[61964]: DEBUG nova.network.neutron [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1630.385489] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.654167] env[61964]: DEBUG nova.network.neutron [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Updating instance_info_cache with network_info: [{"id": "4ddebefb-a058-44f4-b830-d2b0906126a0", "address": "fa:16:3e:96:f8:d3", "network": {"id": "02afc8b9-c726-42b8-81c4-b48ca1fdbd7f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-161696213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40991aebb97545db8c04deda187dcdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ddebefb-a0", "ovs_interfaceid": "4ddebefb-a058-44f4-b830-d2b0906126a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.666011] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Releasing lock "refresh_cache-ac955d73-c844-4b98-b791-7d7c749c6954" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1630.666364] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Instance network_info: |[{"id": "4ddebefb-a058-44f4-b830-d2b0906126a0", "address": "fa:16:3e:96:f8:d3", "network": {"id": "02afc8b9-c726-42b8-81c4-b48ca1fdbd7f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-161696213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40991aebb97545db8c04deda187dcdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ddebefb-a0", "ovs_interfaceid": 
"4ddebefb-a058-44f4-b830-d2b0906126a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1630.666776] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:f8:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ddebefb-a058-44f4-b830-d2b0906126a0', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1630.674417] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating folder: Project (40991aebb97545db8c04deda187dcdfd). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1630.675017] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca269510-55bc-4fcb-9aaa-baf5547f364a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.687018] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Created folder: Project (40991aebb97545db8c04deda187dcdfd) in parent group-v351942. [ 1630.687254] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating folder: Instances. Parent ref: group-v351980. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1630.687528] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa570aec-e1ab-49a9-96ea-49a18d408ec1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.696240] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Created folder: Instances in parent group-v351980. [ 1630.696470] env[61964]: DEBUG oslo.service.loopingcall [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.696654] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1630.696852] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd15f608-e2d6-4aa2-968f-d7f25406898b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.717625] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1630.717625] env[61964]: value = "task-1688605" [ 1630.717625] env[61964]: _type = "Task" [ 1630.717625] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.728117] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688605, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.230110] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688605, 'name': CreateVM_Task, 'duration_secs': 0.300739} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.230464] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1631.231248] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1631.231363] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1631.231734] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1631.232105] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba35dfcc-2244-47f3-b651-5e7158ab0653 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.237058] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 1631.237058] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be2066-bcc7-910e-fb6a-f5475133f8b1" [ 1631.237058] env[61964]: _type = "Task" [ 1631.237058] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.245129] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be2066-bcc7-910e-fb6a-f5475133f8b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.748793] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1631.749082] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1631.749305] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1632.317061] env[61964]: DEBUG nova.compute.manager [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Received event network-changed-4ddebefb-a058-44f4-b830-d2b0906126a0 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1632.317363] env[61964]: DEBUG nova.compute.manager [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Refreshing instance network info cache due to event network-changed-4ddebefb-a058-44f4-b830-d2b0906126a0. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1632.317593] env[61964]: DEBUG oslo_concurrency.lockutils [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] Acquiring lock "refresh_cache-ac955d73-c844-4b98-b791-7d7c749c6954" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1632.317593] env[61964]: DEBUG oslo_concurrency.lockutils [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] Acquired lock "refresh_cache-ac955d73-c844-4b98-b791-7d7c749c6954" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1632.317839] env[61964]: DEBUG nova.network.neutron [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Refreshing network info cache for port 4ddebefb-a058-44f4-b830-d2b0906126a0 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1632.383904] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1632.401088] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1632.401327] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1632.401499] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1632.401653] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1632.402806] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52f691b-866c-4825-9ae6-764b48c91202 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.412091] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bac44b9-19dc-4750-b6e7-5387cbaaac5d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.430854] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3e8e93-7402-49d8-a56c-cb290cc9ea2c {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.438050] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c66a8e-3988-4e70-ad94-324f8ff07b76 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.473450] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181370MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1632.473621] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1632.473851] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1632.559240] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f2c66aa6-4406-4cfa-8a13-c382eebed6bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.559408] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c5dd385e-2447-4539-aed1-81d957076f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.559731] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8993737-d2ef-4987-8c91-d1320771434a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.559890] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.560030] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.560158] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.560278] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.560393] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.560507] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.560620] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1632.572074] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 871ae746-5bc6-4979-80be-9f67d0db5301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.583285] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8ea5b2d1-bfc8-4531-b5fe-60926e4950d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.594709] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.606487] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.618616] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5bafa3d0-3619-4d7e-b625-9b389394738f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.631906] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 40aa82a7-09f6-4e99-bd8b-32c500aac259 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.646778] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 24dfefdd-8357-4f82-aaf0-9f6e6907291f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.659245] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance daea8bd9-5d0a-40e3-9353-28785d5fd7ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.670214] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5413b152-2cd7-41c4-bf18-bd3d51971a6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.681189] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 571e9cdb-28ac-43de-a0ed-45458f12d68d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.691948] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.703152] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ad4f562-805b-48e0-99ae-53934da0af4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.715287] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 068818f5-6df6-4de7-8158-c4f8bf11bb9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.725555] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9fa91cf5-e051-4136-bdd2-66beb12a8cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.738272] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance be0bc30b-c63d-4b33-9668-bbcd7d889f79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.749438] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1fffa4d3-fe6b-47c9-ad52-db62c0c88a03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.764484] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 02a9b7be-f0a7-42f0-ac71-860e753f9408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.780235] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ece2c65c-60f9-4a4e-b135-f79d7adb188e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.792984] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.804071] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1632.804335] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1632.804484] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1633.045937] env[61964]: DEBUG nova.network.neutron [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Updated VIF entry in instance network info cache for port 4ddebefb-a058-44f4-b830-d2b0906126a0. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1633.046323] env[61964]: DEBUG nova.network.neutron [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Updating instance_info_cache with network_info: [{"id": "4ddebefb-a058-44f4-b830-d2b0906126a0", "address": "fa:16:3e:96:f8:d3", "network": {"id": "02afc8b9-c726-42b8-81c4-b48ca1fdbd7f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-161696213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40991aebb97545db8c04deda187dcdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ddebefb-a0", "ovs_interfaceid": "4ddebefb-a058-44f4-b830-d2b0906126a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.066483] env[61964]: DEBUG oslo_concurrency.lockutils [req-71b319f8-5250-4c60-b7fd-6cd15f15dcd6 req-8c352d0c-187a-47ae-8ab1-55c147c947a5 service nova] Releasing lock "refresh_cache-ac955d73-c844-4b98-b791-7d7c749c6954" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1633.292179] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21236c8f-99b8-4d04-a6fc-18937089ac00 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.301885] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef0655f-4e46-4b27-be85-1e754b834491 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.339288] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6e8347-5157-4f5f-8e2f-1e7f664290b9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.351023] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e878c5a3-5110-48fa-99f1-401e3b6c569e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.362113] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1633.375973] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1633.395028] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1633.395028] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.921s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1634.758907] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1634.759310] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1636.390165] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.312382] env[61964]: DEBUG oslo_concurrency.lockutils [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1647.008787] env[61964]: DEBUG oslo_concurrency.lockutils [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1647.289967] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "b8993737-d2ef-4987-8c91-d1320771434a" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1651.427239] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1651.440690] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1656.769709] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1664.035496] env[61964]: DEBUG oslo_concurrency.lockutils [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1666.931581] env[61964]: DEBUG oslo_concurrency.lockutils [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1668.536067] env[61964]: DEBUG oslo_concurrency.lockutils [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "ac955d73-c844-4b98-b791-7d7c749c6954" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1672.527205] env[61964]: WARNING oslo_vmware.rw_handles [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1672.527205] env[61964]: ERROR 
oslo_vmware.rw_handles self._conn.getresponse() [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.527205] env[61964]: ERROR oslo_vmware.rw_handles [ 1672.527205] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1672.528587] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1672.528587] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Copying Virtual Disk [datastore1] vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/76d979ee-ebd0-4918-94ff-89a6533200a4/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1672.528877] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e84ae26d-2b99-45f9-a541-eaf0b1e8ab33 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.538710] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Waiting for the task: (returnval){ [ 1672.538710] env[61964]: value = "task-1688606" [ 1672.538710] env[61964]: _type = "Task" [ 1672.538710] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.550490] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Task: {'id': task-1688606, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.051016] env[61964]: DEBUG oslo_vmware.exceptions [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1673.051389] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1673.052011] env[61964]: ERROR nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1673.052011] env[61964]: Faults: ['InvalidArgument'] [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Traceback (most recent call last): [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] yield resources [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self.driver.spawn(context, instance, image_meta, [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self._fetch_image_if_missing(context, vi) [ 1673.052011] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] image_cache(vi, tmp_image_ds_loc) [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] vm_util.copy_virtual_disk( [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: 
f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] session._wait_for_task(vmdk_copy_task) [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] return self.wait_for_task(task_ref) [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] return evt.wait() [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] result = hub.switch() [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1673.052376] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] return self.greenlet.switch() [ 1673.052713] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1673.052713] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self.f(*self.args, **self.kw) [ 1673.052713] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1673.052713] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] raise exceptions.translate_fault(task_info.error) [ 1673.052713] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1673.052713] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Faults: ['InvalidArgument'] [ 1673.052713] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] [ 1673.052713] env[61964]: INFO nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Terminating instance [ 1673.058044] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1673.058044] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 
tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.058044] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1673.058044] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1673.058044] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27523265-c270-40a5-bbf0-128625e301a7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.066558] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb79d1e-7d32-444c-a7e6-4d057a9e0b88 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.069436] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1673.070020] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1f0ae1f-96e7-444a-919d-6529b5530370 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.074031] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.074031] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1673.074031] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adb6f8dc-1d40-4275-aaac-69042629cf0f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.079920] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1673.079920] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52918fe7-1733-618c-25b8-5bc33ad76f3b" [ 1673.079920] env[61964]: _type = "Task" [ 1673.079920] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.088184] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52918fe7-1733-618c-25b8-5bc33ad76f3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.222051] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1673.222051] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1673.222051] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Deleting the datastore file [datastore1] f2c66aa6-4406-4cfa-8a13-c382eebed6bc {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1673.222051] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca58fbcf-4ccf-4b35-85f2-783018bb3dab {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.232028] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Waiting for the task: (returnval){ [ 1673.232028] env[61964]: value = "task-1688608" [ 1673.232028] env[61964]: _type = "Task" [ 1673.232028] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.240590] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Task: {'id': task-1688608, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.992749] env[61964]: DEBUG oslo_vmware.api [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Task: {'id': task-1688608, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075683} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.993128] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1673.993534] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating directory with path [datastore1] vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.998243] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.998243] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1673.999283] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1673.999283] env[61964]: INFO nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Took 0.94 seconds to destroy the instance on the hypervisor. 
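The traceback recorded above shows the failing spawn path: _fetch_image_if_missing calls _cache_sparse_image, which calls vm_util.copy_virtual_disk and then blocks in oslo_vmware's wait_for_task until vCenter reports the CopyVirtualDisk_Task as failed with "A specified parameter was not correct: fileType". The snippet below is a minimal, self-contained sketch of that poll-until-done loop only; FakeTask, poll_task and TaskFailed are hypothetical stand-ins written for illustration and are not the real oslo_vmware API.

# Sketch of the poll-until-done pattern the traceback above walks through
# (wait_for_task -> _poll_task). All names here are hypothetical stand-ins.
import time


class TaskFailed(Exception):
    """Raised when the backend reports the task ended in error."""


class FakeTask:
    """Stands in for a vCenter task handle such as task-1688606."""

    def __init__(self, states):
        self._states = iter(states)

    def info(self):
        # Each call returns (state, error_message_or_None).
        return next(self._states)


def poll_task(task, interval=0.5):
    """Loop until the task reports success or error, then return or raise."""
    while True:
        state, error = task.info()
        if state == "success":
            return
        if state == "error":
            # oslo_vmware translates the fault into an exception; we just raise.
            raise TaskFailed(error)
        time.sleep(interval)  # 'running'/'queued': keep polling


if __name__ == "__main__":
    failing = FakeTask([("running", None),
                        ("error", "A specified parameter was not correct: fileType")])
    try:
        poll_task(failing, interval=0.01)
    except TaskFailed as exc:
        print("task failed:", exc)
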
[ 1674.003062] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fbd08bc-ddde-4168-8b01-3dbbc3ebe9e7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.004316] env[61964]: DEBUG nova.compute.claims [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1674.004948] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1674.004948] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1674.014628] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Created directory with path [datastore1] vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.014832] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Fetch image to [datastore1] vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1674.015055] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1674.015794] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca33228-6011-4950-bcad-1a8701409bf2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.029110] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88eb8259-928f-4eb1-9e96-0af50adf35fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.044939] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f10e756-9424-45db-96da-8b7438d83716 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.096162] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e01cbc-e294-4b02-a4af-af1d78fa2868 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.103084] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-393533ac-e9fe-444b-b057-7b2a30347d11 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.123748] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1674.192567] env[61964]: DEBUG oslo_vmware.rw_handles [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1674.261786] env[61964]: DEBUG oslo_vmware.rw_handles [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1674.261786] env[61964]: DEBUG oslo_vmware.rw_handles [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1674.559515] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0937d9-e02b-464f-afe6-50b871072326 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.566973] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af96d9b-4c7a-4b12-9cd0-b639a0c8e805 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.599035] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64af279-6649-4047-a89b-6f149370bffb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.608032] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875d6715-a3a3-4773-afc1-5f5ac93653fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.620625] env[61964]: DEBUG nova.compute.provider_tree [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1674.635138] env[61964]: DEBUG nova.scheduler.client.report [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1674.657536] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.652s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1674.658231] env[61964]: ERROR nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1674.658231] env[61964]: Faults: ['InvalidArgument'] [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Traceback (most recent call last): [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, 
in _build_and_run_instance [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self.driver.spawn(context, instance, image_meta, [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self._fetch_image_if_missing(context, vi) [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] image_cache(vi, tmp_image_ds_loc) [ 1674.658231] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] vm_util.copy_virtual_disk( [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] session._wait_for_task(vmdk_copy_task) [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] return self.wait_for_task(task_ref) [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] return evt.wait() [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] result = hub.switch() [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] return self.greenlet.switch() [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1674.658577] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] self.f(*self.args, **self.kw) [ 1674.659068] env[61964]: ERROR nova.compute.manager [instance: 
f2c66aa6-4406-4cfa-8a13-c382eebed6bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1674.659068] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] raise exceptions.translate_fault(task_info.error) [ 1674.659068] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1674.659068] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Faults: ['InvalidArgument'] [ 1674.659068] env[61964]: ERROR nova.compute.manager [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] [ 1674.659743] env[61964]: DEBUG nova.compute.utils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1674.661948] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Build of instance f2c66aa6-4406-4cfa-8a13-c382eebed6bc was re-scheduled: A specified parameter was not correct: fileType [ 1674.661948] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1674.661948] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1674.662263] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1674.662498] env[61964]: DEBUG nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1674.662755] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1674.877205] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a1ec4d5f-0426-48a2-91f3-2cb728392611 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "26741651-12c2-4ef0-bbe4-37e981f9a7f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1674.877205] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a1ec4d5f-0426-48a2-91f3-2cb728392611 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "26741651-12c2-4ef0-bbe4-37e981f9a7f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1675.146271] env[61964]: DEBUG nova.network.neutron [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.162336] env[61964]: INFO nova.compute.manager [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Took 0.50 seconds to deallocate network for instance. 
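The oslo_concurrency.lockutils lines throughout this log follow one shape: 'Acquiring lock "<name>" by "<caller>"', then 'acquired ... waited Ns', then '"released" ... held Ns'. The sketch below reproduces that timing/instrumentation pattern with plain threading; named_lock is a hypothetical helper written for illustration and is not the lockutils implementation.

# Sketch of the named-lock timing pattern behind the
# 'Acquiring lock ... / acquired ... waited / "released" ... held' lines.
# named_lock() is a hypothetical helper, not oslo_concurrency.lockutils.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)


@contextmanager
def named_lock(name, owner):
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    with _locks[name]:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)  # simulated critical section
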
[ 1675.331389] env[61964]: INFO nova.scheduler.client.report [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Deleted allocations for instance f2c66aa6-4406-4cfa-8a13-c382eebed6bc [ 1675.360571] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5d137bf8-c388-4cfb-9656-796d35e3ea84 tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.137s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1675.360885] env[61964]: DEBUG oslo_concurrency.lockutils [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 37.048s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1675.361069] env[61964]: DEBUG oslo_concurrency.lockutils [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Acquiring lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1675.362097] env[61964]: DEBUG oslo_concurrency.lockutils [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1675.362097] env[61964]: DEBUG oslo_concurrency.lockutils [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1675.363316] env[61964]: INFO nova.compute.manager [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Terminating instance [ 1675.365348] env[61964]: DEBUG nova.compute.manager [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1675.365892] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1675.366294] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36a73437-5fdb-4715-94b5-4d3d7ae84932 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.378337] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572280f7-2429-48e3-8aaa-a9edcf3f0be2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.396506] env[61964]: DEBUG nova.compute.manager [None req-60a69151-7460-4deb-8c2e-fec49fd82361 tempest-ImagesOneServerTestJSON-773828785 tempest-ImagesOneServerTestJSON-773828785-project-member] [instance: 871ae746-5bc6-4979-80be-9f67d0db5301] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1675.421682] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f2c66aa6-4406-4cfa-8a13-c382eebed6bc could not be found. [ 1675.421905] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1675.422095] env[61964]: INFO nova.compute.manager [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1675.422354] env[61964]: DEBUG oslo.service.loopingcall [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1675.422583] env[61964]: DEBUG nova.compute.manager [-] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1675.422682] env[61964]: DEBUG nova.network.neutron [-] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1675.428947] env[61964]: DEBUG nova.compute.manager [None req-60a69151-7460-4deb-8c2e-fec49fd82361 tempest-ImagesOneServerTestJSON-773828785 tempest-ImagesOneServerTestJSON-773828785-project-member] [instance: 871ae746-5bc6-4979-80be-9f67d0db5301] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1675.459041] env[61964]: DEBUG nova.network.neutron [-] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.462660] env[61964]: DEBUG oslo_concurrency.lockutils [None req-60a69151-7460-4deb-8c2e-fec49fd82361 tempest-ImagesOneServerTestJSON-773828785 tempest-ImagesOneServerTestJSON-773828785-project-member] Lock "871ae746-5bc6-4979-80be-9f67d0db5301" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.443s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1675.468987] env[61964]: INFO nova.compute.manager [-] [instance: f2c66aa6-4406-4cfa-8a13-c382eebed6bc] Took 0.05 seconds to deallocate network for instance. [ 1675.477469] env[61964]: DEBUG nova.compute.manager [None req-518d4398-cde3-4c61-8fb8-8d1ef2d67f68 tempest-InstanceActionsTestJSON-2141208745 tempest-InstanceActionsTestJSON-2141208745-project-member] [instance: 8ea5b2d1-bfc8-4531-b5fe-60926e4950d5] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1675.503007] env[61964]: DEBUG nova.compute.manager [None req-518d4398-cde3-4c61-8fb8-8d1ef2d67f68 tempest-InstanceActionsTestJSON-2141208745 tempest-InstanceActionsTestJSON-2141208745-project-member] [instance: 8ea5b2d1-bfc8-4531-b5fe-60926e4950d5] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1675.546704] env[61964]: DEBUG oslo_concurrency.lockutils [None req-518d4398-cde3-4c61-8fb8-8d1ef2d67f68 tempest-InstanceActionsTestJSON-2141208745 tempest-InstanceActionsTestJSON-2141208745-project-member] Lock "8ea5b2d1-bfc8-4531-b5fe-60926e4950d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.325s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1675.565653] env[61964]: DEBUG nova.compute.manager [None req-83f090aa-998b-4eb8-9301-957352111ba0 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1675.595657] env[61964]: DEBUG nova.compute.manager [None req-83f090aa-998b-4eb8-9301-957352111ba0 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1675.619470] env[61964]: DEBUG oslo_concurrency.lockutils [None req-82dcc2d4-8d8e-47b3-a73b-fd7326139afd tempest-ServerMetadataNegativeTestJSON-765880896 tempest-ServerMetadataNegativeTestJSON-765880896-project-member] Lock "f2c66aa6-4406-4cfa-8a13-c382eebed6bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.259s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1675.626152] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83f090aa-998b-4eb8-9301-957352111ba0 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "bca2d5b3-2ec6-4b98-859e-5ae9d49eb23c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.576s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1675.638540] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1675.697057] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1675.697328] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1675.698888] env[61964]: INFO nova.compute.claims [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1676.174017] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed20d6aa-00f1-428a-8516-daa1c54cd446 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.181364] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2b028d-6a2c-4ad6-b3c8-b13ff91138fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.219276] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdc934f-702b-45b8-a785-8f50488f255c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.233387] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d144cf30-6f68-44ea-adbb-571851127642 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.251855] env[61964]: DEBUG nova.compute.provider_tree [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1676.266661] env[61964]: DEBUG nova.scheduler.client.report [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1676.294522] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.597s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1676.295477] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1676.350233] env[61964]: DEBUG nova.compute.utils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1676.351824] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Allocating IP information in the background. 
{{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1676.352119] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1676.367154] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1676.454155] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1676.484173] env[61964]: DEBUG nova.policy [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b94cc3cb460f4cc6a264dd70c4c4d064', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40c88d0201ba4420a2e8ae0d237c29f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1676.494505] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1676.495700] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1676.495700] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1676.495700] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1676.495700] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1676.495700] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1676.495939] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1676.495939] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1676.495939] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1676.496062] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1676.496207] env[61964]: DEBUG nova.virt.hardware [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1676.497105] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833e4d60-3e44-4573-bd81-066825d50d8d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.505303] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c502481c-a9ca-42c7-b1d6-1e61c75700db {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.755376] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1677.675795] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Successfully created port: b08400e0-1bdd-40ef-8488-0c78d965efbd {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1679.183690] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Successfully created port: aa12fa8d-0619-420a-acf8-61bc76c4aacd {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1680.523028] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Successfully created port: eaed692c-1ccf-4db0-b620-793548f40355 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1682.759374] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Successfully updated port: b08400e0-1bdd-40ef-8488-0c78d965efbd {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1683.378535] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "66787186-e8c6-4700-9caf-bd7e7970b65d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1683.379030] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1683.962418] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Successfully updated port: aa12fa8d-0619-420a-acf8-61bc76c4aacd {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1684.219222] env[61964]: DEBUG nova.compute.manager [req-4118c310-408d-4a63-a911-bf0afde2af79 req-bb267e63-b958-4b91-8601-d59ab58c4c5a service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received event network-vif-plugged-b08400e0-1bdd-40ef-8488-0c78d965efbd {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1684.219222] env[61964]: DEBUG oslo_concurrency.lockutils [req-4118c310-408d-4a63-a911-bf0afde2af79 
req-bb267e63-b958-4b91-8601-d59ab58c4c5a service nova] Acquiring lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1684.219222] env[61964]: DEBUG oslo_concurrency.lockutils [req-4118c310-408d-4a63-a911-bf0afde2af79 req-bb267e63-b958-4b91-8601-d59ab58c4c5a service nova] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1684.219222] env[61964]: DEBUG oslo_concurrency.lockutils [req-4118c310-408d-4a63-a911-bf0afde2af79 req-bb267e63-b958-4b91-8601-d59ab58c4c5a service nova] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1684.219421] env[61964]: DEBUG nova.compute.manager [req-4118c310-408d-4a63-a911-bf0afde2af79 req-bb267e63-b958-4b91-8601-d59ab58c4c5a service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] No waiting events found dispatching network-vif-plugged-b08400e0-1bdd-40ef-8488-0c78d965efbd {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1684.219463] env[61964]: WARNING nova.compute.manager [req-4118c310-408d-4a63-a911-bf0afde2af79 req-bb267e63-b958-4b91-8601-d59ab58c4c5a service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received unexpected event network-vif-plugged-b08400e0-1bdd-40ef-8488-0c78d965efbd for instance with vm_state building and task_state deleting. [ 1684.383303] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1684.383514] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1684.383662] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1684.416870] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.416870] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.417037] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418118] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418118] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418118] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418118] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418118] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418439] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418439] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1684.418439] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1684.549738] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8defa999-b2a5-4e88-a6d8-f99dabcae3c7 tempest-InstanceActionsV221TestJSON-1256052157 tempest-InstanceActionsV221TestJSON-1256052157-project-member] Acquiring lock "3453eda4-41f7-4558-a2cc-9dbce697c4e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1684.549738] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8defa999-b2a5-4e88-a6d8-f99dabcae3c7 tempest-InstanceActionsV221TestJSON-1256052157 tempest-InstanceActionsV221TestJSON-1256052157-project-member] Lock "3453eda4-41f7-4558-a2cc-9dbce697c4e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1686.346498] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Successfully updated port: eaed692c-1ccf-4db0-b620-793548f40355 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1686.363850] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1686.364012] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1686.364154] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1686.384081] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1686.466875] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1686.875817] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received event network-changed-b08400e0-1bdd-40ef-8488-0c78d965efbd {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1686.875817] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Refreshing instance network info cache due to event network-changed-b08400e0-1bdd-40ef-8488-0c78d965efbd. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1686.881698] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Acquiring lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1688.077992] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updating instance_info_cache with network_info: [{"id": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "address": "fa:16:3e:7a:c9:2f", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb08400e0-1b", "ovs_interfaceid": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "address": "fa:16:3e:41:e7:4a", "network": {"id": "e5132112-9400-4b87-95f6-8b79c9c862c1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1279062574", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapaa12fa8d-06", "ovs_interfaceid": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eaed692c-1ccf-4db0-b620-793548f40355", "address": "fa:16:3e:02:e1:a4", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaed692c-1c", "ovs_interfaceid": "eaed692c-1ccf-4db0-b620-793548f40355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.097630] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1688.098056] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance network_info: |[{"id": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "address": "fa:16:3e:7a:c9:2f", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb08400e0-1b", "ovs_interfaceid": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "address": "fa:16:3e:41:e7:4a", "network": {"id": "e5132112-9400-4b87-95f6-8b79c9c862c1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1279062574", "subnets": [{"cidr": 
"192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa12fa8d-06", "ovs_interfaceid": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eaed692c-1ccf-4db0-b620-793548f40355", "address": "fa:16:3e:02:e1:a4", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaed692c-1c", "ovs_interfaceid": "eaed692c-1ccf-4db0-b620-793548f40355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1688.098402] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Acquired lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1688.098587] env[61964]: DEBUG nova.network.neutron [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Refreshing network info cache for port b08400e0-1bdd-40ef-8488-0c78d965efbd {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1688.101241] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:c9:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b08400e0-1bdd-40ef-8488-0c78d965efbd', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:e7:4a', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa12fa8d-0619-420a-acf8-61bc76c4aacd', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:e1:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaed692c-1ccf-4db0-b620-793548f40355', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1688.120542] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Creating folder: Project (40c88d0201ba4420a2e8ae0d237c29f8). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1688.121764] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f2890d1-8fca-42b2-a827-24124f294a7e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.143407] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Created folder: Project (40c88d0201ba4420a2e8ae0d237c29f8) in parent group-v351942. [ 1688.143698] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Creating folder: Instances. Parent ref: group-v351983. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1688.143881] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b86ef62-03f7-4d72-8a27-68b24f3df310 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.154522] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Created folder: Instances in parent group-v351983. [ 1688.154779] env[61964]: DEBUG oslo.service.loopingcall [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1688.154972] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1688.155192] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e26144a4-cabc-498b-bacb-34543ea20109 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.182923] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1688.182923] env[61964]: value = "task-1688611" [ 1688.182923] env[61964]: _type = "Task" [ 1688.182923] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.193512] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688611, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.384489] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1688.702448] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688611, 'name': CreateVM_Task, 'duration_secs': 0.443353} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.702624] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1688.703587] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1688.703751] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1688.704076] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1688.704343] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bd68ab5-04b9-4e73-89fb-82d373cba5aa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.712971] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for the task: (returnval){ [ 1688.712971] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c8a2c7-3907-1a80-e3ce-1157000d3bce" [ 1688.712971] env[61964]: _type = "Task" [ 1688.712971] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.724319] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c8a2c7-3907-1a80-e3ce-1157000d3bce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.811774] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fcc9efb1-aa91-4c46-b085-6622bc8531bf tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] Acquiring lock "30c8aea5-4f4a-42bd-adc0-d433c519b28c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1688.813275] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fcc9efb1-aa91-4c46-b085-6622bc8531bf tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] Lock "30c8aea5-4f4a-42bd-adc0-d433c519b28c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1689.208347] env[61964]: DEBUG nova.network.neutron [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updated VIF entry in instance network info cache for port b08400e0-1bdd-40ef-8488-0c78d965efbd. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1689.211700] env[61964]: DEBUG nova.network.neutron [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updating instance_info_cache with network_info: [{"id": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "address": "fa:16:3e:7a:c9:2f", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb08400e0-1b", "ovs_interfaceid": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "address": "fa:16:3e:41:e7:4a", "network": {"id": "e5132112-9400-4b87-95f6-8b79c9c862c1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1279062574", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa12fa8d-06", "ovs_interfaceid": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eaed692c-1ccf-4db0-b620-793548f40355", "address": "fa:16:3e:02:e1:a4", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaed692c-1c", "ovs_interfaceid": "eaed692c-1ccf-4db0-b620-793548f40355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.225657] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1689.226119] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1689.226201] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1689.235026] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Releasing lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1689.235026] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received event network-vif-plugged-aa12fa8d-0619-420a-acf8-61bc76c4aacd 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1689.235026] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Acquiring lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1689.235026] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1689.235026] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1689.235026] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] No waiting events found dispatching network-vif-plugged-aa12fa8d-0619-420a-acf8-61bc76c4aacd {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1689.235026] env[61964]: WARNING nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received unexpected event network-vif-plugged-aa12fa8d-0619-420a-acf8-61bc76c4aacd for instance with vm_state building and task_state deleting. [ 1689.235026] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received event network-changed-aa12fa8d-0619-420a-acf8-61bc76c4aacd {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1689.235026] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Refreshing instance network info cache due to event network-changed-aa12fa8d-0619-420a-acf8-61bc76c4aacd. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1689.235026] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Acquiring lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1689.235026] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Acquired lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1689.235026] env[61964]: DEBUG nova.network.neutron [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Refreshing network info cache for port aa12fa8d-0619-420a-acf8-61bc76c4aacd {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1689.379843] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.986176] env[61964]: DEBUG nova.compute.manager [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received event network-changed-eaed692c-1ccf-4db0-b620-793548f40355 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1689.987011] env[61964]: DEBUG nova.compute.manager [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Refreshing instance network info cache due to event network-changed-eaed692c-1ccf-4db0-b620-793548f40355. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1689.987011] env[61964]: DEBUG oslo_concurrency.lockutils [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] Acquiring lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1690.100233] env[61964]: DEBUG nova.network.neutron [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updated VIF entry in instance network info cache for port aa12fa8d-0619-420a-acf8-61bc76c4aacd. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1690.100233] env[61964]: DEBUG nova.network.neutron [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updating instance_info_cache with network_info: [{"id": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "address": "fa:16:3e:7a:c9:2f", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb08400e0-1b", "ovs_interfaceid": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "address": "fa:16:3e:41:e7:4a", "network": {"id": "e5132112-9400-4b87-95f6-8b79c9c862c1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1279062574", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa12fa8d-06", "ovs_interfaceid": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eaed692c-1ccf-4db0-b620-793548f40355", "address": "fa:16:3e:02:e1:a4", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", 
"segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaed692c-1c", "ovs_interfaceid": "eaed692c-1ccf-4db0-b620-793548f40355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.117022] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Releasing lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1690.117022] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received event network-vif-plugged-eaed692c-1ccf-4db0-b620-793548f40355 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1690.117022] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Acquiring lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1690.117022] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1690.117022] env[61964]: DEBUG oslo_concurrency.lockutils [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1690.117022] env[61964]: DEBUG nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] No waiting events found dispatching network-vif-plugged-eaed692c-1ccf-4db0-b620-793548f40355 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1690.117022] env[61964]: WARNING nova.compute.manager [req-41db6a16-0b8d-4364-868b-b77d13e12859 req-6efcbdf7-fa62-4063-8523-96082e6980c7 service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Received unexpected event network-vif-plugged-eaed692c-1ccf-4db0-b620-793548f40355 for instance with vm_state building and task_state deleting. 
[ 1690.117022] env[61964]: DEBUG oslo_concurrency.lockutils [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] Acquired lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1690.117022] env[61964]: DEBUG nova.network.neutron [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Refreshing network info cache for port eaed692c-1ccf-4db0-b620-793548f40355 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1690.383988] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1690.384265] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1690.384393] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1690.979695] env[61964]: DEBUG nova.network.neutron [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updated VIF entry in instance network info cache for port eaed692c-1ccf-4db0-b620-793548f40355. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1690.980164] env[61964]: DEBUG nova.network.neutron [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updating instance_info_cache with network_info: [{"id": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "address": "fa:16:3e:7a:c9:2f", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb08400e0-1b", "ovs_interfaceid": "b08400e0-1bdd-40ef-8488-0c78d965efbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "address": "fa:16:3e:41:e7:4a", "network": {"id": "e5132112-9400-4b87-95f6-8b79c9c862c1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1279062574", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa12fa8d-06", "ovs_interfaceid": "aa12fa8d-0619-420a-acf8-61bc76c4aacd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eaed692c-1ccf-4db0-b620-793548f40355", "address": "fa:16:3e:02:e1:a4", "network": {"id": "1e191c5b-71c6-4f3b-b83c-bbc0fb99f383", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-633016384", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.203", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", 
"segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaed692c-1c", "ovs_interfaceid": "eaed692c-1ccf-4db0-b620-793548f40355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.994140] env[61964]: DEBUG oslo_concurrency.lockutils [req-4150c2aa-9120-427d-9833-1a2b16965bdf req-7757bbe1-4525-4e67-9c1c-55d535e549bb service nova] Releasing lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1691.383833] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1691.755750] env[61964]: DEBUG oslo_concurrency.lockutils [None req-51e51f31-7000-4235-b2ba-31f3da799a4c tempest-ServerPasswordTestJSON-1096031617 tempest-ServerPasswordTestJSON-1096031617-project-member] Acquiring lock "05b4962d-2a56-40b2-a58d-9dd178160e3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1691.755750] env[61964]: DEBUG oslo_concurrency.lockutils [None req-51e51f31-7000-4235-b2ba-31f3da799a4c tempest-ServerPasswordTestJSON-1096031617 tempest-ServerPasswordTestJSON-1096031617-project-member] Lock "05b4962d-2a56-40b2-a58d-9dd178160e3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1692.386164] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.386164] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.404044] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1692.404044] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1692.404044] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1692.404044] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1692.404044] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217e7afc-6ce8-4d88-98f5-fa295b02f89c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.413639] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0d30f3-5441-4105-ada3-225066bcf73d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.431024] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97eb0916-e6e4-4707-a2cf-80d225adb099 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.437933] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e1e092-f773-4f66-b236-00209dac2cde {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.479605] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181383MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1692.479763] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1692.479964] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1692.664086] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c5dd385e-2447-4539-aed1-81d957076f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.664245] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8993737-d2ef-4987-8c91-d1320771434a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.664368] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.664494] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.664638] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.664755] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.664869] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.664980] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.665104] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.665266] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1692.683979] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 571e9cdb-28ac-43de-a0ed-45458f12d68d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.693916] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ad4f562-805b-48e0-99ae-53934da0af4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.704840] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 068818f5-6df6-4de7-8158-c4f8bf11bb9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.719238] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9fa91cf5-e051-4136-bdd2-66beb12a8cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.734394] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance be0bc30b-c63d-4b33-9668-bbcd7d889f79 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.746567] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1fffa4d3-fe6b-47c9-ad52-db62c0c88a03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.759256] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 02a9b7be-f0a7-42f0-ac71-860e753f9408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.770541] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ece2c65c-60f9-4a4e-b135-f79d7adb188e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.781696] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.793779] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.804174] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.817963] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 26741651-12c2-4ef0-bbe4-37e981f9a7f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.829492] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.842453] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3453eda4-41f7-4558-a2cc-9dbce697c4e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.851876] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 30c8aea5-4f4a-42bd-adc0-d433c519b28c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.862029] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 05b4962d-2a56-40b2-a58d-9dd178160e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.862421] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1692.862624] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1692.880408] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1692.900033] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1692.900033] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1692.915418] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing 
aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1692.944055] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1693.330203] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f277f2e9-af61-4ad3-bb95-7702237aeb95 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.337872] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068603c5-3f7d-401d-88b3-77a6889c37ef {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.379423] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47569876-e818-438c-9979-d98b2520e7e4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.388409] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e474c4c9-3adf-4b40-b57f-ce86ef8b74b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.403730] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1693.412428] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1693.430780] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1693.431036] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.951s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1693.431524] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.431708] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1693.446292] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] There are 0 instances to clean {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1693.868971] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2a01269d-4a8c-4e14-87e0-a34f26ea1472 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] Acquiring lock "0ac28fb7-0325-43ee-9bb2-fac4e99a71e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1693.869268] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2a01269d-4a8c-4e14-87e0-a34f26ea1472 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] Lock "0ac28fb7-0325-43ee-9bb2-fac4e99a71e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1694.384247] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1694.384438] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61964) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1698.383758] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1704.675869] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bb664791-d73d-4679-b21e-8ce4644cc9f7 tempest-ServerMetadataTestJSON-2041899630 tempest-ServerMetadataTestJSON-2041899630-project-member] Acquiring lock "0ffde9cd-fb06-4ff5-90d7-5173178aa699" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1704.676295] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bb664791-d73d-4679-b21e-8ce4644cc9f7 tempest-ServerMetadataTestJSON-2041899630 tempest-ServerMetadataTestJSON-2041899630-project-member] Lock "0ffde9cd-fb06-4ff5-90d7-5173178aa699" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1708.658011] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6094d48a-2f25-4259-96e9-492b451d5676 tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] Acquiring lock 
"c067782a-36ce-4e03-888a-12a15dcd68c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1708.658346] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6094d48a-2f25-4259-96e9-492b451d5676 tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] Lock "c067782a-36ce-4e03-888a-12a15dcd68c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1714.911953] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dbcc23d1-078b-41ec-b6cf-0175f061dc4b tempest-ServerActionsV293TestJSON-1598391829 tempest-ServerActionsV293TestJSON-1598391829-project-member] Acquiring lock "dae54132-828d-4f84-b0d1-2b3b568882dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1714.912357] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dbcc23d1-078b-41ec-b6cf-0175f061dc4b tempest-ServerActionsV293TestJSON-1598391829 tempest-ServerActionsV293TestJSON-1598391829-project-member] Lock "dae54132-828d-4f84-b0d1-2b3b568882dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1715.813678] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3cb9db90-2e0a-46d6-a9db-aeb009c1e856 tempest-ServerTagsTestJSON-1436814176 tempest-ServerTagsTestJSON-1436814176-project-member] Acquiring lock "19d0d400-d8c1-4348-aef3-8de9c94af5e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1715.813678] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3cb9db90-2e0a-46d6-a9db-aeb009c1e856 tempest-ServerTagsTestJSON-1436814176 tempest-ServerTagsTestJSON-1436814176-project-member] Lock "19d0d400-d8c1-4348-aef3-8de9c94af5e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1720.049885] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eefbee43-8dd7-48e4-9688-b8c484edbcff tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Acquiring lock "9f16e97b-269e-4a39-8816-ee1a4d911450" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1720.050526] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eefbee43-8dd7-48e4-9688-b8c484edbcff tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Lock "9f16e97b-269e-4a39-8816-ee1a4d911450" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1722.025133] env[61964]: WARNING oslo_vmware.rw_handles [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1722.025133] env[61964]: ERROR oslo_vmware.rw_handles [ 1722.025133] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1722.025133] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1722.025133] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Copying Virtual Disk [datastore1] vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/958abc8f-deb5-4c92-bd62-cbb435f501c3/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1722.026662] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-674f0f37-e93c-4d01-8694-ffa2e091756f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.036564] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 
tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1722.036564] env[61964]: value = "task-1688623" [ 1722.036564] env[61964]: _type = "Task" [ 1722.036564] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.044657] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': task-1688623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.547735] env[61964]: DEBUG oslo_vmware.exceptions [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1722.547735] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1722.548102] env[61964]: ERROR nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.548102] env[61964]: Faults: ['InvalidArgument'] [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Traceback (most recent call last): [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] yield resources [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self.driver.spawn(context, instance, image_meta, [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self._fetch_image_if_missing(context, vi) [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] image_cache(vi, tmp_image_ds_loc) [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] vm_util.copy_virtual_disk( [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] session._wait_for_task(vmdk_copy_task) [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] return self.wait_for_task(task_ref) [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] return evt.wait() [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] result = hub.switch() [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] return self.greenlet.switch() [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self.f(*self.args, **self.kw) [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] raise exceptions.translate_fault(task_info.error) [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Faults: ['InvalidArgument'] [ 1722.548102] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] [ 1722.549204] env[61964]: INFO nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Terminating instance [ 1722.549993] env[61964]: 
DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1722.550226] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1722.550875] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1722.551078] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1722.551297] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ceee15cd-79b4-4602-9b52-3a976a551a2e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.553713] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcbe028-1265-4e59-a0e9-9ade19a52347 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.560616] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1722.560816] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-129905b4-0a77-41c9-8387-6c70f0ab8ff7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.563096] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1722.563268] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1722.564319] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f5f809-c764-4d92-9144-ea7bb0df2f9d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.569471] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Waiting for the task: (returnval){ [ 1722.569471] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5231cfbb-c6f7-c0ee-8162-0cc294c7a6de" [ 1722.569471] env[61964]: _type = "Task" [ 1722.569471] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.576757] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5231cfbb-c6f7-c0ee-8162-0cc294c7a6de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.633351] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1722.633608] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1722.633783] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Deleting the datastore file [datastore1] c5dd385e-2447-4539-aed1-81d957076f5f {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1722.634194] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b97b5c3-3c1c-466d-911c-127f8ba441aa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.640597] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1722.640597] env[61964]: value = "task-1688625" [ 1722.640597] env[61964]: _type = "Task" [ 1722.640597] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.648360] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': task-1688625, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.080302] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1723.080598] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Creating directory with path [datastore1] vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1723.080800] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48bbdeab-03a6-4e1c-a26a-3c32e396986a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.092397] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Created directory with path [datastore1] vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1723.092605] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Fetch image to [datastore1] vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1723.092769] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1723.093592] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23040a2b-7e06-49a3-98ec-46094c6a4f88 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.101009] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a976f0-ac26-4c3d-8334-9c4f85f74de9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.110579] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb065692-6f9c-4560-8d11-c3d0a098a8b5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.145413] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb88e22-eec0-4d19-be19-d6c833842087 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.153532] env[61964]: DEBUG oslo_vmware.api [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': task-1688625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063648} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.155260] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1723.155450] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1723.155662] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1723.155877] env[61964]: INFO nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Took 0.60 seconds to destroy the instance on the hypervisor. 
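The DeleteDatastoreFile_Task / wait_for_task exchange above follows the standard oslo.vmware pattern: start an asynchronous vCenter task through the authenticated session, then poll it until vCenter reports completion or the fault is translated into an oslo_vmware exception. A minimal, hedged sketch of that pattern is given here; the host, credentials, datacenter reference and datastore path are placeholders, and this illustrates the oslo.vmware API rather than reproducing Nova's ds_util code.

# Illustrative sketch (not the Nova code itself): delete a datastore file with
# oslo.vmware and wait for the vSphere task, mirroring the
# DeleteDatastoreFile_Task / wait_for_task / _poll_task lines in the log above.
# Host, credentials, datacenter reference and datastore path are placeholders.
from oslo_vmware import api
from oslo_vmware import exceptions as vexc


def delete_datastore_file(host, user, password, dc_ref, ds_path):
    # One authenticated SOAP session, as in the oslo_vmware_api_lock /
    # _create_session lines earlier in the log.
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    try:
        vim = session.vim
        # Start the asynchronous vCenter task...
        task = session.invoke_api(vim, 'DeleteDatastoreFile_Task',
                                  vim.service_content.fileManager,
                                  name=ds_path,       # e.g. '[datastore1] <instance-uuid>'
                                  datacenter=dc_ref)  # a Datacenter managed object ref
        # ...then block on it; vCenter faults are raised as oslo_vmware exceptions.
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # Already gone: treat as success, as Nova's cleanup paths do.
        pass
    finally:
        session.logout()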
[ 1723.158265] env[61964]: DEBUG nova.compute.claims [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1723.158436] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1723.158647] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1723.161126] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-78a40c6b-09bb-45d0-b20f-c74212dc65ca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.248874] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1723.311812] env[61964]: DEBUG oslo_vmware.rw_handles [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1723.380894] env[61964]: DEBUG oslo_vmware.rw_handles [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1723.380894] env[61964]: DEBUG oslo_vmware.rw_handles [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1723.581029] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827342e3-cff7-4cf6-86c1-1ae9589e6796 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.588635] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89696091-150f-4210-99e5-ebf51f1ec8a5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.620125] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461b734f-5dc5-4648-91c8-b03226aac4da {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.627242] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf7b5e2-9948-4d6f-ae16-7438b4c336de {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.641350] env[61964]: DEBUG nova.compute.provider_tree [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.650024] env[61964]: DEBUG nova.scheduler.client.report [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1723.668695] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.510s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1723.669261] env[61964]: ERROR nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1723.669261] env[61964]: Faults: ['InvalidArgument'] [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Traceback (most recent call last): [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1723.669261] 
env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self.driver.spawn(context, instance, image_meta, [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self._fetch_image_if_missing(context, vi) [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] image_cache(vi, tmp_image_ds_loc) [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] vm_util.copy_virtual_disk( [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] session._wait_for_task(vmdk_copy_task) [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] return self.wait_for_task(task_ref) [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] return evt.wait() [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] result = hub.switch() [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] return self.greenlet.switch() [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] self.f(*self.args, **self.kw) [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] raise exceptions.translate_fault(task_info.error) [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Faults: ['InvalidArgument'] [ 1723.669261] env[61964]: ERROR nova.compute.manager [instance: c5dd385e-2447-4539-aed1-81d957076f5f] [ 1723.670278] env[61964]: DEBUG nova.compute.utils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1723.671774] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Build of instance c5dd385e-2447-4539-aed1-81d957076f5f was re-scheduled: A specified parameter was not correct: fileType [ 1723.671774] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1723.672164] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1723.672338] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1723.672488] env[61964]: DEBUG nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1723.672647] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1724.032716] env[61964]: DEBUG nova.network.neutron [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.049636] env[61964]: INFO nova.compute.manager [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: c5dd385e-2447-4539-aed1-81d957076f5f] Took 0.38 seconds to deallocate network for instance. [ 1724.152270] env[61964]: INFO nova.scheduler.client.report [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Deleted allocations for instance c5dd385e-2447-4539-aed1-81d957076f5f [ 1724.176096] env[61964]: DEBUG oslo_concurrency.lockutils [None req-454de5ca-ed7a-4356-b77b-24c55740a135 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "c5dd385e-2447-4539-aed1-81d957076f5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 277.725s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.188217] env[61964]: DEBUG nova.compute.manager [None req-5641a49c-d0f5-4836-bd1a-9385e825a26b tempest-AttachInterfacesUnderV243Test-2126647480 tempest-AttachInterfacesUnderV243Test-2126647480-project-member] [instance: 5bafa3d0-3619-4d7e-b625-9b389394738f] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.214424] env[61964]: DEBUG nova.compute.manager [None req-5641a49c-d0f5-4836-bd1a-9385e825a26b tempest-AttachInterfacesUnderV243Test-2126647480 tempest-AttachInterfacesUnderV243Test-2126647480-project-member] [instance: 5bafa3d0-3619-4d7e-b625-9b389394738f] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.249373] env[61964]: DEBUG oslo_concurrency.lockutils [None req-5641a49c-d0f5-4836-bd1a-9385e825a26b tempest-AttachInterfacesUnderV243Test-2126647480 tempest-AttachInterfacesUnderV243Test-2126647480-project-member] Lock "5bafa3d0-3619-4d7e-b625-9b389394738f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 244.800s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.262494] env[61964]: DEBUG nova.compute.manager [None req-3d15b390-6556-4a81-9fb9-45c708e8da48 tempest-ServersV294TestFqdnHostnames-1706479718 tempest-ServersV294TestFqdnHostnames-1706479718-project-member] [instance: 40aa82a7-09f6-4e99-bd8b-32c500aac259] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.304999] env[61964]: DEBUG nova.compute.manager [None req-3d15b390-6556-4a81-9fb9-45c708e8da48 tempest-ServersV294TestFqdnHostnames-1706479718 tempest-ServersV294TestFqdnHostnames-1706479718-project-member] [instance: 40aa82a7-09f6-4e99-bd8b-32c500aac259] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.330994] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3d15b390-6556-4a81-9fb9-45c708e8da48 tempest-ServersV294TestFqdnHostnames-1706479718 tempest-ServersV294TestFqdnHostnames-1706479718-project-member] Lock "40aa82a7-09f6-4e99-bd8b-32c500aac259" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.826s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.340792] env[61964]: DEBUG nova.compute.manager [None req-a68af9f7-b3e4-41ad-8dbb-a8a4d2d14b8c tempest-TenantUsagesTestJSON-1307804085 tempest-TenantUsagesTestJSON-1307804085-project-member] [instance: daea8bd9-5d0a-40e3-9353-28785d5fd7ca] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.365490] env[61964]: DEBUG nova.compute.manager [None req-a68af9f7-b3e4-41ad-8dbb-a8a4d2d14b8c tempest-TenantUsagesTestJSON-1307804085 tempest-TenantUsagesTestJSON-1307804085-project-member] [instance: daea8bd9-5d0a-40e3-9353-28785d5fd7ca] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.386392] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a68af9f7-b3e4-41ad-8dbb-a8a4d2d14b8c tempest-TenantUsagesTestJSON-1307804085 tempest-TenantUsagesTestJSON-1307804085-project-member] Lock "daea8bd9-5d0a-40e3-9353-28785d5fd7ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.244s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.395376] env[61964]: DEBUG nova.compute.manager [None req-8679ef6b-aeb2-49fa-9e54-c76e1d9651f5 tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] [instance: 24dfefdd-8357-4f82-aaf0-9f6e6907291f] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.419387] env[61964]: DEBUG nova.compute.manager [None req-8679ef6b-aeb2-49fa-9e54-c76e1d9651f5 tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] [instance: 24dfefdd-8357-4f82-aaf0-9f6e6907291f] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.440673] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8679ef6b-aeb2-49fa-9e54-c76e1d9651f5 tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] Lock "24dfefdd-8357-4f82-aaf0-9f6e6907291f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.298s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.448556] env[61964]: DEBUG nova.compute.manager [None req-4d99a370-109b-4227-a2b5-4d1516b8703c tempest-ServerDiagnosticsV248Test-172020440 tempest-ServerDiagnosticsV248Test-172020440-project-member] [instance: 5413b152-2cd7-41c4-bf18-bd3d51971a6e] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.469727] env[61964]: DEBUG nova.compute.manager [None req-4d99a370-109b-4227-a2b5-4d1516b8703c tempest-ServerDiagnosticsV248Test-172020440 tempest-ServerDiagnosticsV248Test-172020440-project-member] [instance: 5413b152-2cd7-41c4-bf18-bd3d51971a6e] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.491000] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4d99a370-109b-4227-a2b5-4d1516b8703c tempest-ServerDiagnosticsV248Test-172020440 tempest-ServerDiagnosticsV248Test-172020440-project-member] Lock "5413b152-2cd7-41c4-bf18-bd3d51971a6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.971s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.500323] env[61964]: DEBUG nova.compute.manager [None req-0e578cae-a84d-4f4e-a058-70b3877021f7 tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] [instance: 571e9cdb-28ac-43de-a0ed-45458f12d68d] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.524512] env[61964]: DEBUG nova.compute.manager [None req-0e578cae-a84d-4f4e-a058-70b3877021f7 tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] [instance: 571e9cdb-28ac-43de-a0ed-45458f12d68d] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.544699] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e578cae-a84d-4f4e-a058-70b3877021f7 tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] Lock "571e9cdb-28ac-43de-a0ed-45458f12d68d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.115s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.554513] env[61964]: DEBUG nova.compute.manager [None req-287e749f-5edc-4bfe-8634-9afd70738b1e tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] [instance: ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.579444] env[61964]: DEBUG nova.compute.manager [None req-287e749f-5edc-4bfe-8634-9afd70738b1e tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] [instance: ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.600031] env[61964]: DEBUG oslo_concurrency.lockutils [None req-287e749f-5edc-4bfe-8634-9afd70738b1e tempest-ServersAdminTestJSON-2007548163 tempest-ServersAdminTestJSON-2007548163-project-member] Lock "ebe42f1a-5eb9-4b14-8de0-3b1e8e54a158" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.085s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.610516] env[61964]: DEBUG nova.compute.manager [None req-8d635c1a-b3e5-4a82-a8e6-23f14c77c5b8 tempest-ServerActionsTestOtherB-1760892883 tempest-ServerActionsTestOtherB-1760892883-project-member] [instance: 0ad4f562-805b-48e0-99ae-53934da0af4e] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.635719] env[61964]: DEBUG nova.compute.manager [None req-8d635c1a-b3e5-4a82-a8e6-23f14c77c5b8 tempest-ServerActionsTestOtherB-1760892883 tempest-ServerActionsTestOtherB-1760892883-project-member] [instance: 0ad4f562-805b-48e0-99ae-53934da0af4e] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.659979] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8d635c1a-b3e5-4a82-a8e6-23f14c77c5b8 tempest-ServerActionsTestOtherB-1760892883 tempest-ServerActionsTestOtherB-1760892883-project-member] Lock "0ad4f562-805b-48e0-99ae-53934da0af4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.121s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.669916] env[61964]: DEBUG nova.compute.manager [None req-02930160-1860-41b5-a23a-cb1bdd129452 tempest-ServersTestManualDisk-2090240948 tempest-ServersTestManualDisk-2090240948-project-member] [instance: 068818f5-6df6-4de7-8158-c4f8bf11bb9d] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.694613] env[61964]: DEBUG nova.compute.manager [None req-02930160-1860-41b5-a23a-cb1bdd129452 tempest-ServersTestManualDisk-2090240948 tempest-ServersTestManualDisk-2090240948-project-member] [instance: 068818f5-6df6-4de7-8158-c4f8bf11bb9d] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.715189] env[61964]: DEBUG oslo_concurrency.lockutils [None req-02930160-1860-41b5-a23a-cb1bdd129452 tempest-ServersTestManualDisk-2090240948 tempest-ServersTestManualDisk-2090240948-project-member] Lock "068818f5-6df6-4de7-8158-c4f8bf11bb9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.968s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.723726] env[61964]: DEBUG nova.compute.manager [None req-dc31d29a-10d7-4ecf-8295-b9ba09b46025 tempest-ServerAddressesNegativeTestJSON-963656082 tempest-ServerAddressesNegativeTestJSON-963656082-project-member] [instance: 9fa91cf5-e051-4136-bdd2-66beb12a8cdf] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.746979] env[61964]: DEBUG nova.compute.manager [None req-dc31d29a-10d7-4ecf-8295-b9ba09b46025 tempest-ServerAddressesNegativeTestJSON-963656082 tempest-ServerAddressesNegativeTestJSON-963656082-project-member] [instance: 9fa91cf5-e051-4136-bdd2-66beb12a8cdf] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.765909] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dc31d29a-10d7-4ecf-8295-b9ba09b46025 tempest-ServerAddressesNegativeTestJSON-963656082 tempest-ServerAddressesNegativeTestJSON-963656082-project-member] Lock "9fa91cf5-e051-4136-bdd2-66beb12a8cdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.157s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.776680] env[61964]: DEBUG nova.compute.manager [None req-be4d9c64-bdd4-484f-b03f-7b50e972412b tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] [instance: be0bc30b-c63d-4b33-9668-bbcd7d889f79] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.801027] env[61964]: DEBUG nova.compute.manager [None req-be4d9c64-bdd4-484f-b03f-7b50e972412b tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] [instance: be0bc30b-c63d-4b33-9668-bbcd7d889f79] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.824600] env[61964]: DEBUG oslo_concurrency.lockutils [None req-be4d9c64-bdd4-484f-b03f-7b50e972412b tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] Lock "be0bc30b-c63d-4b33-9668-bbcd7d889f79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.982s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.834876] env[61964]: DEBUG nova.compute.manager [None req-833f3f57-3b5e-4684-be64-5dcfb0142f41 tempest-ServerAddressesTestJSON-998688652 tempest-ServerAddressesTestJSON-998688652-project-member] [instance: 1fffa4d3-fe6b-47c9-ad52-db62c0c88a03] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.868927] env[61964]: DEBUG nova.compute.manager [None req-833f3f57-3b5e-4684-be64-5dcfb0142f41 tempest-ServerAddressesTestJSON-998688652 tempest-ServerAddressesTestJSON-998688652-project-member] [instance: 1fffa4d3-fe6b-47c9-ad52-db62c0c88a03] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.890060] env[61964]: DEBUG oslo_concurrency.lockutils [None req-833f3f57-3b5e-4684-be64-5dcfb0142f41 tempest-ServerAddressesTestJSON-998688652 tempest-ServerAddressesTestJSON-998688652-project-member] Lock "1fffa4d3-fe6b-47c9-ad52-db62c0c88a03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.015s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.898907] env[61964]: DEBUG nova.compute.manager [None req-42d4c102-91f9-4666-a2a5-72894a859014 tempest-ServerRescueTestJSONUnderV235-1639241418 tempest-ServerRescueTestJSONUnderV235-1639241418-project-member] [instance: 02a9b7be-f0a7-42f0-ac71-860e753f9408] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.923603] env[61964]: DEBUG nova.compute.manager [None req-42d4c102-91f9-4666-a2a5-72894a859014 tempest-ServerRescueTestJSONUnderV235-1639241418 tempest-ServerRescueTestJSONUnderV235-1639241418-project-member] [instance: 02a9b7be-f0a7-42f0-ac71-860e753f9408] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1724.945760] env[61964]: DEBUG oslo_concurrency.lockutils [None req-42d4c102-91f9-4666-a2a5-72894a859014 tempest-ServerRescueTestJSONUnderV235-1639241418 tempest-ServerRescueTestJSONUnderV235-1639241418-project-member] Lock "02a9b7be-f0a7-42f0-ac71-860e753f9408" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.756s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1724.955475] env[61964]: DEBUG nova.compute.manager [None req-d0208860-679b-4290-be2b-e4c8368321b9 tempest-ImagesOneServerNegativeTestJSON-637618724 tempest-ImagesOneServerNegativeTestJSON-637618724-project-member] [instance: ece2c65c-60f9-4a4e-b135-f79d7adb188e] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1724.980490] env[61964]: DEBUG nova.compute.manager [None req-d0208860-679b-4290-be2b-e4c8368321b9 tempest-ImagesOneServerNegativeTestJSON-637618724 tempest-ImagesOneServerNegativeTestJSON-637618724-project-member] [instance: ece2c65c-60f9-4a4e-b135-f79d7adb188e] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1725.002731] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0208860-679b-4290-be2b-e4c8368321b9 tempest-ImagesOneServerNegativeTestJSON-637618724 tempest-ImagesOneServerNegativeTestJSON-637618724-project-member] Lock "ece2c65c-60f9-4a4e-b135-f79d7adb188e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.680s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1725.012144] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1725.066276] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1725.066534] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1725.068212] env[61964]: INFO nova.compute.claims [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1725.401695] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d03705-2560-4031-9d4e-b2142c9efc34 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.409432] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc6886e-a942-4a8a-9ea7-bf468b5df105 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.439611] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9fe6f0-77f1-4b0b-964c-a1f6f11b703b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.447028] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f97f2f9-7e28-4ff0-be57-a7af13888061 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.460869] env[61964]: DEBUG nova.compute.provider_tree [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1725.469638] env[61964]: DEBUG nova.scheduler.client.report [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1725.485854] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.419s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1725.486355] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1725.517574] env[61964]: DEBUG nova.compute.utils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1725.519443] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1725.519443] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1725.526956] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1725.581603] env[61964]: DEBUG nova.policy [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e039833386e4062b055fa55b54153da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fff296ea738049c9895c9e30676a446e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1725.588032] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1725.614230] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1725.614587] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1725.614842] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1725.615073] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1725.615229] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Image pref 0:0:0 {{(pid=61964) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1725.615374] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1725.615587] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1725.615740] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1725.615904] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1725.616076] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1725.616250] env[61964]: DEBUG nova.virt.hardware [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1725.617168] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9049413-e3f1-4759-803b-021c92884089 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.625859] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8512e572-0e9a-4a0e-a749-4bf43edf8a9e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.219032] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Successfully created port: ee787f3c-895a-4c2f-aee0-f64835a6a8b5 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1726.955791] env[61964]: DEBUG nova.compute.manager [req-cd553f7f-30e0-47d0-ac2a-1b975c7526be req-4102fecf-d906-43ce-b510-dcd09105933b service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Received event network-vif-plugged-ee787f3c-895a-4c2f-aee0-f64835a6a8b5 {{(pid=61964) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1726.956075] env[61964]: DEBUG oslo_concurrency.lockutils [req-cd553f7f-30e0-47d0-ac2a-1b975c7526be req-4102fecf-d906-43ce-b510-dcd09105933b service nova] Acquiring lock "032f2d6d-04c3-4210-a8d0-1c325a304a88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1726.956264] env[61964]: DEBUG oslo_concurrency.lockutils [req-cd553f7f-30e0-47d0-ac2a-1b975c7526be req-4102fecf-d906-43ce-b510-dcd09105933b service nova] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1726.956444] env[61964]: DEBUG oslo_concurrency.lockutils [req-cd553f7f-30e0-47d0-ac2a-1b975c7526be req-4102fecf-d906-43ce-b510-dcd09105933b service nova] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1726.956607] env[61964]: DEBUG nova.compute.manager [req-cd553f7f-30e0-47d0-ac2a-1b975c7526be req-4102fecf-d906-43ce-b510-dcd09105933b service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] No waiting events found dispatching network-vif-plugged-ee787f3c-895a-4c2f-aee0-f64835a6a8b5 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1726.956762] env[61964]: WARNING nova.compute.manager [req-cd553f7f-30e0-47d0-ac2a-1b975c7526be req-4102fecf-d906-43ce-b510-dcd09105933b service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Received unexpected event network-vif-plugged-ee787f3c-895a-4c2f-aee0-f64835a6a8b5 for instance with vm_state building and task_state spawning. 
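The network-vif-plugged entries above are produced when Neutron notifies Nova through the compute API's os-server-external-events resource once the port becomes ACTIVE; because this instance was not yet registered as waiting for that event, Nova logs the "Received unexpected event" warning and discards it. A rough sketch of such a notification as a plain REST call is shown below; the endpoint, token, server UUID and port id are placeholders, not values taken from this log.

# Rough sketch of the notification a network service sends to Nova via the
# compute API's os-server-external-events resource when a VIF is plugged.
# Endpoint, token, server UUID and port id are placeholders.
import requests

COMPUTE_ENDPOINT = 'https://compute.example.test/v2.1'   # placeholder
TOKEN = '<keystone-token>'                                # placeholder


def send_vif_plugged_event(server_uuid, port_id):
    body = {
        'events': [{
            'name': 'network-vif-plugged',
            'server_uuid': server_uuid,   # UUID of the instance being spawned
            'tag': port_id,               # Neutron port id carried in the event
            'status': 'completed',
        }]
    }
    resp = requests.post(
        f'{COMPUTE_ENDPOINT}/os-server-external-events',
        json=body,
        headers={'X-Auth-Token': TOKEN},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()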
[ 1727.089543] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Successfully updated port: ee787f3c-895a-4c2f-aee0-f64835a6a8b5 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1727.103392] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "refresh_cache-032f2d6d-04c3-4210-a8d0-1c325a304a88" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1727.103631] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquired lock "refresh_cache-032f2d6d-04c3-4210-a8d0-1c325a304a88" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1727.103856] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1727.167016] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1727.445295] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Updating instance_info_cache with network_info: [{"id": "ee787f3c-895a-4c2f-aee0-f64835a6a8b5", "address": "fa:16:3e:5d:29:8c", "network": {"id": "bd74e37c-3295-4377-b07a-2077a84777d5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-87505054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fff296ea738049c9895c9e30676a446e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee787f3c-89", "ovs_interfaceid": "ee787f3c-895a-4c2f-aee0-f64835a6a8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.463718] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Releasing lock "refresh_cache-032f2d6d-04c3-4210-a8d0-1c325a304a88" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1727.464057] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Instance network_info: |[{"id": "ee787f3c-895a-4c2f-aee0-f64835a6a8b5", "address": "fa:16:3e:5d:29:8c", "network": {"id": "bd74e37c-3295-4377-b07a-2077a84777d5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-87505054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fff296ea738049c9895c9e30676a446e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee787f3c-89", "ovs_interfaceid": "ee787f3c-895a-4c2f-aee0-f64835a6a8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1727.464456] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:29:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee787f3c-895a-4c2f-aee0-f64835a6a8b5', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1727.472051] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Creating folder: Project (fff296ea738049c9895c9e30676a446e). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1727.472827] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3aa10691-aa58-4e8b-81c9-f85920abafca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.484239] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Created folder: Project (fff296ea738049c9895c9e30676a446e) in parent group-v351942. [ 1727.484425] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Creating folder: Instances. Parent ref: group-v351990. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1727.484654] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-028eb352-4a86-4eeb-9ea9-15f59416ad0b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.493320] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Created folder: Instances in parent group-v351990. [ 1727.493557] env[61964]: DEBUG oslo.service.loopingcall [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.493738] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1727.493944] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcbfe6af-0498-4cc8-952f-f10b97825fc1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.512330] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1727.512330] env[61964]: value = "task-1688628" [ 1727.512330] env[61964]: _type = "Task" [ 1727.512330] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.520127] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688628, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.021736] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688628, 'name': CreateVM_Task, 'duration_secs': 0.436687} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.022075] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1728.022805] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1728.023068] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1728.023407] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1728.023799] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dea9dc1-01f7-40b9-ae00-78650903a956 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.028720] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Waiting for the task: (returnval){ [ 1728.028720] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b35b71-2e5b-b4a3-a41e-a8dc16ff83f5" [ 1728.028720] env[61964]: _type = "Task" 
[ 1728.028720] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.037844] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b35b71-2e5b-b4a3-a41e-a8dc16ff83f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.539254] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1728.539584] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1728.539797] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1728.983235] env[61964]: DEBUG nova.compute.manager [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Received event network-changed-ee787f3c-895a-4c2f-aee0-f64835a6a8b5 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1728.983473] env[61964]: DEBUG nova.compute.manager [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Refreshing instance network info cache due to event network-changed-ee787f3c-895a-4c2f-aee0-f64835a6a8b5. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1728.983727] env[61964]: DEBUG oslo_concurrency.lockutils [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] Acquiring lock "refresh_cache-032f2d6d-04c3-4210-a8d0-1c325a304a88" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1728.983900] env[61964]: DEBUG oslo_concurrency.lockutils [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] Acquired lock "refresh_cache-032f2d6d-04c3-4210-a8d0-1c325a304a88" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1728.984079] env[61964]: DEBUG nova.network.neutron [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Refreshing network info cache for port ee787f3c-895a-4c2f-aee0-f64835a6a8b5 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1729.353450] env[61964]: DEBUG nova.network.neutron [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Updated VIF entry in instance network info cache for port ee787f3c-895a-4c2f-aee0-f64835a6a8b5. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1729.353908] env[61964]: DEBUG nova.network.neutron [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Updating instance_info_cache with network_info: [{"id": "ee787f3c-895a-4c2f-aee0-f64835a6a8b5", "address": "fa:16:3e:5d:29:8c", "network": {"id": "bd74e37c-3295-4377-b07a-2077a84777d5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-87505054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fff296ea738049c9895c9e30676a446e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee787f3c-89", "ovs_interfaceid": "ee787f3c-895a-4c2f-aee0-f64835a6a8b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1729.363684] env[61964]: DEBUG oslo_concurrency.lockutils [req-c953e0eb-8e69-4223-b427-4d0645f5e04d req-d1aa776b-d669-4037-820a-167760790c7d service nova] Releasing lock "refresh_cache-032f2d6d-04c3-4210-a8d0-1c325a304a88" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1732.241488] env[61964]: DEBUG oslo_concurrency.lockutils [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 
tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1744.391631] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.391900] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1744.391954] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1744.415086] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.415256] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.415390] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.415617] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.415679] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.415761] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.415883] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.416009] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.416137] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.416256] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1744.416372] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1746.383635] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1748.384316] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1749.378634] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1751.384015] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1751.384296] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1751.384419] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1752.384589] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1753.384267] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1754.384624] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1754.396417] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1754.396634] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1754.396797] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1754.396967] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1754.398125] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df2dc03-4fa2-49ce-bb19-64177e172cd9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.407049] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b362b8-0836-483c-84a4-cb45d595134c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.422132] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdc9ea4-565d-48b9-a841-8df097a3c9a8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.428447] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c16836-28cf-42ce-aff2-effc523677b7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.456868] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181336MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1754.457041] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1754.457254] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1754.529540] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8993737-d2ef-4987-8c91-d1320771434a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.529715] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.529841] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.529962] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.530094] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.530214] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.530334] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.530447] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.530563] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.530664] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1754.542049] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.552325] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.562395] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 26741651-12c2-4ef0-bbe4-37e981f9a7f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.573140] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.582278] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3453eda4-41f7-4558-a2cc-9dbce697c4e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.591634] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 30c8aea5-4f4a-42bd-adc0-d433c519b28c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.600821] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 05b4962d-2a56-40b2-a58d-9dd178160e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.609501] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ac28fb7-0325-43ee-9bb2-fac4e99a71e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.618570] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ffde9cd-fb06-4ff5-90d7-5173178aa699 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.627512] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c067782a-36ce-4e03-888a-12a15dcd68c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.636537] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance dae54132-828d-4f84-b0d1-2b3b568882dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.645485] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 19d0d400-d8c1-4348-aef3-8de9c94af5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.655390] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9f16e97b-269e-4a39-8816-ee1a4d911450 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.655625] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1754.655772] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1754.918275] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9020b34-726e-4207-abe1-9599b0fd19ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.927040] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd09a7e-1487-4d1d-8546-e90443008f3b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.955516] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62576ca2-4cbd-4444-8042-e3549eed1490 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.962689] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdad4584-7303-4a02-9e07-79a5a704e3e8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.975232] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1754.985342] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1754.999267] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1754.999417] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.542s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1759.994943] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.680303] env[61964]: WARNING oslo_vmware.rw_handles [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1772.680303] env[61964]: ERROR oslo_vmware.rw_handles [ 1772.680824] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1772.682817] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 
tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1772.683105] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Copying Virtual Disk [datastore1] vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/ef5dd63a-d323-4ecb-b541-669fb1b423cd/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1772.683415] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07a28040-4fbb-4c1e-bed7-49ef8d58ac48 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.690773] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Waiting for the task: (returnval){ [ 1772.690773] env[61964]: value = "task-1688629" [ 1772.690773] env[61964]: _type = "Task" [ 1772.690773] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.698863] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Task: {'id': task-1688629, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.201576] env[61964]: DEBUG oslo_vmware.exceptions [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1773.201912] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1773.202430] env[61964]: ERROR nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1773.202430] env[61964]: Faults: ['InvalidArgument'] [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] Traceback (most recent call last): [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] yield resources [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self.driver.spawn(context, instance, image_meta, [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self._fetch_image_if_missing(context, vi) [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] image_cache(vi, tmp_image_ds_loc) [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] vm_util.copy_virtual_disk( [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] session._wait_for_task(vmdk_copy_task) [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] return self.wait_for_task(task_ref) [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] return evt.wait() [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] result = hub.switch() [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] return self.greenlet.switch() [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self.f(*self.args, **self.kw) [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] raise exceptions.translate_fault(task_info.error) [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] Faults: ['InvalidArgument'] [ 1773.202430] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] [ 1773.203174] env[61964]: INFO nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Terminating instance [ 1773.204356] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1773.204572] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1773.204813] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-69a8dd6c-74f8-4201-932b-af24de73ffbb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.207011] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1773.207208] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1773.207934] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d98de0a-9429-4f08-94fa-fa4cb022877e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.214954] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1773.215212] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88a548c3-c22d-43b5-8546-d30f8c9f753d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.217527] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1773.217700] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1773.218673] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-411a11ae-5e9d-4afb-af04-fc3f8566dcde {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.223886] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1773.223886] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52458711-34a1-97dd-a2d7-f4f87240209c" [ 1773.223886] env[61964]: _type = "Task" [ 1773.223886] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.230871] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52458711-34a1-97dd-a2d7-f4f87240209c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.291450] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1773.291716] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1773.291948] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Deleting the datastore file [datastore1] b8993737-d2ef-4987-8c91-d1320771434a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1773.292266] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-037cfeb1-21c5-47c9-a60a-a9ea6a80af40 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.299614] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Waiting for the task: (returnval){ [ 1773.299614] env[61964]: value = "task-1688631" [ 1773.299614] env[61964]: _type = "Task" [ 1773.299614] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.306362] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Task: {'id': task-1688631, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.734073] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1773.734354] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating directory with path [datastore1] vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1773.734585] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95ba5b22-4b88-4d09-85e5-a7c861e707e8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.747488] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Created directory with path [datastore1] vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1773.747690] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Fetch image to [datastore1] vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1773.747857] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1773.748600] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1caaaa-b422-4a4d-9356-ace09dba8e8c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.754800] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93a8353-afd4-4767-9f75-1b53b802b474 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.763546] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac32514-51b9-4010-b86c-1afcc0515fd6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.792593] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6d62cfce-b45c-41f9-ad2b-6ce5fc871cbf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.797983] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b7d2534f-ed30-43f2-974a-95462d8d85ab {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.806689] env[61964]: DEBUG oslo_vmware.api [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Task: {'id': task-1688631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074414} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.806913] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1773.807105] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1773.807290] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1773.807485] env[61964]: INFO nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1773.809549] env[61964]: DEBUG nova.compute.claims [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1773.809713] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1773.809921] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1773.820773] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1773.873161] env[61964]: DEBUG oslo_vmware.rw_handles [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1773.932387] env[61964]: DEBUG oslo_vmware.rw_handles [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1773.932575] env[61964]: DEBUG oslo_vmware.rw_handles [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1774.186233] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c87075-4e36-4d38-b444-6445fdd3f1c5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.193580] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcac1267-59e9-43dd-a253-ce0ae5638064 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.224051] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8667d2c-33e3-4b19-9e4f-4b1b7680a696 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.230474] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffaf53d3-f946-4aec-bec7-9e8038c5dc09 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.244010] env[61964]: DEBUG nova.compute.provider_tree [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.253230] env[61964]: DEBUG nova.scheduler.client.report [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1774.268342] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.458s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1774.268916] env[61964]: ERROR nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1774.268916] env[61964]: Faults: ['InvalidArgument'] [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] Traceback (most recent call last): [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self.driver.spawn(context, instance, image_meta, [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self._fetch_image_if_missing(context, vi) [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] image_cache(vi, tmp_image_ds_loc) [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] vm_util.copy_virtual_disk( [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] session._wait_for_task(vmdk_copy_task) [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] return self.wait_for_task(task_ref) [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] return evt.wait() [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] result = hub.switch() [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] return self.greenlet.switch() [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] self.f(*self.args, **self.kw) [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: 
b8993737-d2ef-4987-8c91-d1320771434a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] raise exceptions.translate_fault(task_info.error) [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] Faults: ['InvalidArgument'] [ 1774.268916] env[61964]: ERROR nova.compute.manager [instance: b8993737-d2ef-4987-8c91-d1320771434a] [ 1774.269803] env[61964]: DEBUG nova.compute.utils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1774.272112] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Build of instance b8993737-d2ef-4987-8c91-d1320771434a was re-scheduled: A specified parameter was not correct: fileType [ 1774.272112] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1774.272498] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1774.272667] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1774.272936] env[61964]: DEBUG nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1774.273133] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1774.652151] env[61964]: DEBUG nova.network.neutron [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.668065] env[61964]: INFO nova.compute.manager [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Took 0.39 seconds to deallocate network for instance. [ 1774.773665] env[61964]: INFO nova.scheduler.client.report [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Deleted allocations for instance b8993737-d2ef-4987-8c91-d1320771434a [ 1774.792787] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8529a4f6-9d19-4e91-ab4c-ff9db248f7d3 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "b8993737-d2ef-4987-8c91-d1320771434a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 326.042s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1774.794191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "b8993737-d2ef-4987-8c91-d1320771434a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 127.504s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1774.794417] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Acquiring lock "b8993737-d2ef-4987-8c91-d1320771434a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1774.794625] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock 
"b8993737-d2ef-4987-8c91-d1320771434a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1774.794790] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "b8993737-d2ef-4987-8c91-d1320771434a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1774.797099] env[61964]: INFO nova.compute.manager [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Terminating instance [ 1774.798676] env[61964]: DEBUG nova.compute.manager [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1774.798867] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1774.799367] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a5f1f8f-e485-4f83-8949-4e501cf7832c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.805610] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1774.813065] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfa4544-6103-4955-a755-164243ba880e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.841432] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b8993737-d2ef-4987-8c91-d1320771434a could not be found. 
[ 1774.841673] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1774.841821] env[61964]: INFO nova.compute.manager [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1774.842087] env[61964]: DEBUG oslo.service.loopingcall [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.846413] env[61964]: DEBUG nova.compute.manager [-] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1774.846517] env[61964]: DEBUG nova.network.neutron [-] [instance: b8993737-d2ef-4987-8c91-d1320771434a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1774.857971] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1774.858217] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1774.859633] env[61964]: INFO nova.compute.claims [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1774.877319] env[61964]: DEBUG nova.network.neutron [-] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.895670] env[61964]: INFO nova.compute.manager [-] [instance: b8993737-d2ef-4987-8c91-d1320771434a] Took 0.05 seconds to deallocate network for instance. 
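Note: "Claim successful on node domain-c8..." above is the resource tracker accepting the m1.nano flavor (1 vCPU, 128 MB RAM, 1 GB root disk, per the flavor dump later in this log) against the hypervisor's free capacity (reported further down as free_vcpus=48, free_ram=181354MB, free_disk=155GB). The toy check below only illustrates that comparison; the real path is ResourceTracker.instance_claim() plus a Placement allocation.

def can_claim(requested, free):
    # True only if every requested resource fits within remaining capacity.
    return all(requested[resource] <= free[resource] for resource in requested)

requested = {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}   # m1.nano flavor from this log
free = {'VCPU': 48, 'MEMORY_MB': 181354, 'DISK_GB': 155}  # hypervisor view from this log
assert can_claim(requested, free)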
[ 1775.024525] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b221ce15-8ab1-4d4f-a409-7dd7e6d05b66 tempest-VolumesAssistedSnapshotsTest-1458866141 tempest-VolumesAssistedSnapshotsTest-1458866141-project-member] Lock "b8993737-d2ef-4987-8c91-d1320771434a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.231s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1775.180428] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8718a8a-8200-4108-b299-d45abfb7088c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.188518] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddce032-300e-4942-a2ca-7c5666972797 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.218763] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0394316-af20-4423-8129-cf22b26d96f1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.225988] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b8a982-a264-4b69-94c7-e9555c276f77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.239218] env[61964]: DEBUG nova.compute.provider_tree [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1775.247920] env[61964]: DEBUG nova.scheduler.client.report [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1775.263548] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1775.263835] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1775.323954] env[61964]: DEBUG nova.compute.utils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1775.325309] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1775.325464] env[61964]: DEBUG nova.network.neutron [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1775.336039] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1775.399616] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1775.412996] env[61964]: DEBUG nova.policy [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7efaeca7b25e4b28b290f327f7f335c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3765bc7b039b4868a96b6ec336cb318a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1775.430026] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1775.430026] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1775.430026] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1775.430026] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1775.430026] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1775.430026] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1775.430026] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1775.434016] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1775.434016] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1775.434016] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1775.434016] env[61964]: DEBUG nova.virt.hardware [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1775.434016] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31588d4d-6051-4bc6-a34f-6e6c3df27c45 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.441115] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbccb667-0bf3-412b-9867-3ac08b61254d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.776190] env[61964]: DEBUG nova.network.neutron [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Successfully created port: 5f991013-217a-4331-b17a-1effa9f95380 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1776.822955] env[61964]: DEBUG nova.network.neutron [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Successfully updated port: 5f991013-217a-4331-b17a-1effa9f95380 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1776.830472] env[61964]: DEBUG nova.compute.manager [req-98c661bc-641c-44d7-b8ea-5be30d2ee8e7 req-55aee5dd-c0c5-462c-8984-69c5061d9091 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Received event network-vif-plugged-5f991013-217a-4331-b17a-1effa9f95380 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1776.830717] env[61964]: DEBUG oslo_concurrency.lockutils 
[req-98c661bc-641c-44d7-b8ea-5be30d2ee8e7 req-55aee5dd-c0c5-462c-8984-69c5061d9091 service nova] Acquiring lock "2b69def4-b892-4d76-bfd2-841014f75098-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.831085] env[61964]: DEBUG oslo_concurrency.lockutils [req-98c661bc-641c-44d7-b8ea-5be30d2ee8e7 req-55aee5dd-c0c5-462c-8984-69c5061d9091 service nova] Lock "2b69def4-b892-4d76-bfd2-841014f75098-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1776.831085] env[61964]: DEBUG oslo_concurrency.lockutils [req-98c661bc-641c-44d7-b8ea-5be30d2ee8e7 req-55aee5dd-c0c5-462c-8984-69c5061d9091 service nova] Lock "2b69def4-b892-4d76-bfd2-841014f75098-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1776.831259] env[61964]: DEBUG nova.compute.manager [req-98c661bc-641c-44d7-b8ea-5be30d2ee8e7 req-55aee5dd-c0c5-462c-8984-69c5061d9091 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] No waiting events found dispatching network-vif-plugged-5f991013-217a-4331-b17a-1effa9f95380 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1776.831418] env[61964]: WARNING nova.compute.manager [req-98c661bc-641c-44d7-b8ea-5be30d2ee8e7 req-55aee5dd-c0c5-462c-8984-69c5061d9091 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Received unexpected event network-vif-plugged-5f991013-217a-4331-b17a-1effa9f95380 for instance with vm_state building and task_state spawning. [ 1776.846425] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "refresh_cache-2b69def4-b892-4d76-bfd2-841014f75098" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1776.846425] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired lock "refresh_cache-2b69def4-b892-4d76-bfd2-841014f75098" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1776.846425] env[61964]: DEBUG nova.network.neutron [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1776.897542] env[61964]: DEBUG nova.network.neutron [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1777.251894] env[61964]: DEBUG nova.network.neutron [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Updating instance_info_cache with network_info: [{"id": "5f991013-217a-4331-b17a-1effa9f95380", "address": "fa:16:3e:60:e6:8d", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f991013-21", "ovs_interfaceid": "5f991013-217a-4331-b17a-1effa9f95380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.263138] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Releasing lock "refresh_cache-2b69def4-b892-4d76-bfd2-841014f75098" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1777.263596] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Instance network_info: |[{"id": "5f991013-217a-4331-b17a-1effa9f95380", "address": "fa:16:3e:60:e6:8d", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f991013-21", "ovs_interfaceid": "5f991013-217a-4331-b17a-1effa9f95380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1777.264435] env[61964]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:e6:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f991013-217a-4331-b17a-1effa9f95380', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1777.273548] env[61964]: DEBUG oslo.service.loopingcall [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1777.274177] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1777.275073] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a75dcc2b-7302-41bd-b86d-c263895d83f4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.298813] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1777.298813] env[61964]: value = "task-1688632" [ 1777.298813] env[61964]: _type = "Task" [ 1777.298813] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.309729] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688632, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.809692] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688632, 'name': CreateVM_Task, 'duration_secs': 0.295368} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.809928] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1777.810622] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1777.810786] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1777.811115] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1777.811398] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4038c424-e9ee-40af-aa0e-168986b0d196 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.815901] env[61964]: DEBUG oslo_vmware.api [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for the task: (returnval){ [ 1777.815901] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52070c8a-b604-7c58-1963-9eff78ff4474" [ 1777.815901] env[61964]: _type = "Task" [ 1777.815901] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.823909] env[61964]: DEBUG oslo_vmware.api [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52070c8a-b604-7c58-1963-9eff78ff4474, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.326263] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1778.326521] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1778.326757] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1778.883754] env[61964]: DEBUG nova.compute.manager [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Received event network-changed-5f991013-217a-4331-b17a-1effa9f95380 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1778.884075] env[61964]: DEBUG nova.compute.manager [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Refreshing instance network info cache due to event network-changed-5f991013-217a-4331-b17a-1effa9f95380. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1778.884172] env[61964]: DEBUG oslo_concurrency.lockutils [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] Acquiring lock "refresh_cache-2b69def4-b892-4d76-bfd2-841014f75098" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1778.884397] env[61964]: DEBUG oslo_concurrency.lockutils [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] Acquired lock "refresh_cache-2b69def4-b892-4d76-bfd2-841014f75098" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1778.884578] env[61964]: DEBUG nova.network.neutron [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Refreshing network info cache for port 5f991013-217a-4331-b17a-1effa9f95380 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1779.614754] env[61964]: DEBUG nova.network.neutron [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Updated VIF entry in instance network info cache for port 5f991013-217a-4331-b17a-1effa9f95380. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1779.615114] env[61964]: DEBUG nova.network.neutron [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Updating instance_info_cache with network_info: [{"id": "5f991013-217a-4331-b17a-1effa9f95380", "address": "fa:16:3e:60:e6:8d", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f991013-21", "ovs_interfaceid": "5f991013-217a-4331-b17a-1effa9f95380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.629020] env[61964]: DEBUG oslo_concurrency.lockutils [req-a519272d-945f-4f7e-a9b7-c8b35ef083ec req-d66f1c16-a739-400e-806e-1040c7037f74 service nova] Releasing lock "refresh_cache-2b69def4-b892-4d76-bfd2-841014f75098" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1779.783536] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "2b69def4-b892-4d76-bfd2-841014f75098" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1789.900552] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1789.900552] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1806.384067] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.384388] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1806.384388] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1806.407426] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.407637] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.407779] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.407931] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.408050] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.408160] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.408280] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.408398] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.408517] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.408703] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1806.408832] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1808.383595] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.383843] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.380580] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.383239] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.384526] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.384813] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1814.384913] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1814.384913] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1816.384240] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1816.397204] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1816.397432] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1816.397597] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1816.397804] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1816.399082] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7437095-1229-48c4-8621-05d46bf94d1d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.407778] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe28e5aa-80c2-4f7d-b151-eb0dbf0c905c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.421768] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e8566a-b810-48ec-8884-b356a0c174f2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.428470] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5497b0-509d-4b9f-b858-3865252633ba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.457249] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181354MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1816.457398] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1816.457597] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1816.529768] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.529881] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530032] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530161] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530279] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530394] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530508] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530623] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530735] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.530857] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1816.543514] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.553901] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 26741651-12c2-4ef0-bbe4-37e981f9a7f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.563888] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.572335] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3453eda4-41f7-4558-a2cc-9dbce697c4e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.583201] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 30c8aea5-4f4a-42bd-adc0-d433c519b28c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.592409] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 05b4962d-2a56-40b2-a58d-9dd178160e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.602479] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ac28fb7-0325-43ee-9bb2-fac4e99a71e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.613139] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ffde9cd-fb06-4ff5-90d7-5173178aa699 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.624178] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c067782a-36ce-4e03-888a-12a15dcd68c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.635053] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance dae54132-828d-4f84-b0d1-2b3b568882dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.644520] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 19d0d400-d8c1-4348-aef3-8de9c94af5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.654717] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9f16e97b-269e-4a39-8816-ee1a4d911450 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.664339] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1816.664571] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1816.664718] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1816.919638] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bafaaa8-70f6-4fe3-b3b4-00b31feda8c5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.927744] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bfef8a-17b3-40b8-9249-ece053540ca6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.957461] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6c00bf-9ab5-4b23-b64f-c85c1e8c7a53 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.964712] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8248c447-5d24-48d3-9faa-181924d7e47e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.977312] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.986060] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1816.999549] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1816.999736] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.542s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1822.696924] env[61964]: WARNING oslo_vmware.rw_handles [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1822.696924] env[61964]: ERROR oslo_vmware.rw_handles [ 1822.697680] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1822.699193] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1822.699446] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Copying Virtual Disk [datastore1] vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/0b20afd6-fd59-41ca-a8de-3fa023b8c0ef/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1822.699715] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7af6af8-b595-478e-b982-fd2e398dad9e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.709455] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1822.709455] env[61964]: value = "task-1688633" [ 1822.709455] env[61964]: _type = "Task" [ 1822.709455] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.716836] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': task-1688633, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.219597] env[61964]: DEBUG oslo_vmware.exceptions [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1823.220078] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1823.220770] env[61964]: ERROR nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1823.220770] env[61964]: Faults: ['InvalidArgument'] [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Traceback (most recent call last): [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] yield resources [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self.driver.spawn(context, instance, image_meta, [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self._fetch_image_if_missing(context, vi) [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] image_cache(vi, tmp_image_ds_loc) [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] vm_util.copy_virtual_disk( [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] session._wait_for_task(vmdk_copy_task) [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] return self.wait_for_task(task_ref) [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] return evt.wait() [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] result = hub.switch() [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] return self.greenlet.switch() [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self.f(*self.args, **self.kw) [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] raise exceptions.translate_fault(task_info.error) [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Faults: ['InvalidArgument'] [ 1823.220770] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] [ 1823.224467] env[61964]: INFO nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Terminating instance [ 1823.224467] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1823.224467] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1823.224467] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-df5f5965-a88c-460c-bcb0-9528ff79ce2a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.226746] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1823.227074] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1823.227933] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c823f0fa-5166-474f-8c5d-a6c7e3db2ac2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.235083] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1823.236211] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c82a41c-994a-4bab-9cbd-80cf4cb4081a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.237802] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1823.238137] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1823.239753] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5eb5fc0-b2c7-4757-b918-fdef87bba442 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.244186] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Waiting for the task: (returnval){ [ 1823.244186] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d60622-fca8-65e1-df89-c6d0d4ad69e1" [ 1823.244186] env[61964]: _type = "Task" [ 1823.244186] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.253134] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d60622-fca8-65e1-df89-c6d0d4ad69e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.322026] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1823.322026] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1823.322026] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Deleting the datastore file [datastore1] 31e6ec86-cf5a-438c-ad8f-aad775fbb376 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1823.322026] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d11cdb86-6fe8-4060-bbd8-e6416b25870b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.327443] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for the task: (returnval){ [ 1823.327443] env[61964]: value = "task-1688635" [ 1823.327443] env[61964]: _type = "Task" [ 1823.327443] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.334673] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': task-1688635, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.754390] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1823.754667] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Creating directory with path [datastore1] vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1823.755047] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48b2c512-8ba6-4b69-9e9d-7c187bbf7d0e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.766755] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Created directory with path [datastore1] vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1823.766962] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Fetch image to [datastore1] vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1823.767147] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1823.767946] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a5649c-8286-4f49-9c64-ea3988885aad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.774572] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf11c667-b59b-4364-8443-b094557d24ef {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.783544] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba4304e-6ae4-4ee4-8615-c0ce05d4633c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.812720] env[61964]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16e079a-e049-4188-baef-b189c05ee6fe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.817731] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-12cb6f7d-3220-46a3-bfc1-dae799dc5fe1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.836383] env[61964]: DEBUG oslo_vmware.api [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Task: {'id': task-1688635, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075383} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.836602] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1823.836775] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1823.836936] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1823.837116] env[61964]: INFO nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1823.839915] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1823.841782] env[61964]: DEBUG nova.compute.claims [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1823.841952] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1823.842176] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1823.896478] env[61964]: DEBUG oslo_vmware.rw_handles [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1823.956065] env[61964]: DEBUG oslo_vmware.rw_handles [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1823.956065] env[61964]: DEBUG oslo_vmware.rw_handles [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1824.181028] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc94b54c-e6f4-4b9d-b442-9556193fc43a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.188495] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9985a2e3-3f7a-48af-a507-e2edf1fd3ee3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.217826] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656c64f0-bb19-4edb-8c88-f7fbbb2068b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.224915] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e90c209-6dc3-47e6-8196-af0e4fb810f9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.238070] env[61964]: DEBUG nova.compute.provider_tree [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1824.249403] env[61964]: DEBUG nova.scheduler.client.report [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1824.264270] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.422s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1824.264805] env[61964]: ERROR nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1824.264805] env[61964]: Faults: ['InvalidArgument'] [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Traceback (most recent call last): [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1824.264805] 
env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self.driver.spawn(context, instance, image_meta, [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self._fetch_image_if_missing(context, vi) [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] image_cache(vi, tmp_image_ds_loc) [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] vm_util.copy_virtual_disk( [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] session._wait_for_task(vmdk_copy_task) [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] return self.wait_for_task(task_ref) [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] return evt.wait() [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] result = hub.switch() [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] return self.greenlet.switch() [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] self.f(*self.args, **self.kw) [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] raise exceptions.translate_fault(task_info.error) [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Faults: ['InvalidArgument'] [ 1824.264805] env[61964]: ERROR nova.compute.manager [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] [ 1824.265704] env[61964]: DEBUG nova.compute.utils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1824.267209] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Build of instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 was re-scheduled: A specified parameter was not correct: fileType [ 1824.267209] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1824.267607] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1824.267833] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1824.268010] env[61964]: DEBUG nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1824.268179] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1824.631471] env[61964]: DEBUG nova.network.neutron [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.645044] env[61964]: INFO nova.compute.manager [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Took 0.38 seconds to deallocate network for instance. [ 1824.746170] env[61964]: INFO nova.scheduler.client.report [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Deleted allocations for instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 [ 1824.769443] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bba107eb-d72f-41c6-adb7-2b190c15afd3 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 375.602s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1824.770658] env[61964]: DEBUG oslo_concurrency.lockutils [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 177.762s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1824.770875] env[61964]: DEBUG oslo_concurrency.lockutils [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Acquiring lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1824.771101] env[61964]: DEBUG oslo_concurrency.lockutils [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1824.771272] env[61964]: DEBUG oslo_concurrency.lockutils [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1824.774661] env[61964]: INFO nova.compute.manager [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Terminating instance [ 1824.776769] env[61964]: DEBUG nova.compute.manager [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1824.776860] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1824.777102] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e38bc764-497d-44cb-9396-6772029c0f13 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.786741] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf75ae6-480b-401f-8812-c16cb95f5b1d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.797663] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1824.818229] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 31e6ec86-cf5a-438c-ad8f-aad775fbb376 could not be found. 
[ 1824.818488] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1824.818747] env[61964]: INFO nova.compute.manager [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1824.819348] env[61964]: DEBUG oslo.service.loopingcall [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1824.819465] env[61964]: DEBUG nova.compute.manager [-] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1824.819614] env[61964]: DEBUG nova.network.neutron [-] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1824.845054] env[61964]: DEBUG nova.network.neutron [-] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.853022] env[61964]: INFO nova.compute.manager [-] [instance: 31e6ec86-cf5a-438c-ad8f-aad775fbb376] Took 0.03 seconds to deallocate network for instance. 
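Throughout these records, oslo.concurrency's lockutils wrapper emits the paired lock DEBUG lines: "Acquiring lock ... / Acquired lock ... / Releasing lock ..." for the context-manager form (lockutils.py:312/315/333), and "acquired by ... :: waited Xs / released ... :: held Xs" for the synchronized-decorator form (lockutils.py:404/409/423), as with the "compute_resources" lock taken just below for the resource claim. A minimal sketch of the calling pattern that produces this instrumentation, assuming oslo.concurrency is installed; the lock name and claim function are illustrative, not Nova's actual code:

    from oslo_concurrency import lockutils

    def claim_resources(instance_uuid):
        # Context-manager form: an in-process lock named "compute_resources".
        # lockutils logs the "Acquiring lock"/"Acquired lock"/"Releasing lock"
        # DEBUG lines for this form.
        with lockutils.lock("compute_resources"):
            # ... perform the claim against tracked inventory ...
            return {"instance": instance_uuid, "claimed": True}

    @lockutils.synchronized("compute_resources")
    def update_usage(instance_uuid):
        # Decorator form: lockutils' inner wrapper logs the
        # "acquired by ... waited" / "released ... held" DEBUG lines.
        pass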
[ 1824.858158] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1824.858472] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1824.859962] env[61964]: INFO nova.compute.claims [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1824.942880] env[61964]: DEBUG oslo_concurrency.lockutils [None req-76ef88db-95ef-4495-8d10-79eddbd17375 tempest-ListImageFiltersTestJSON-1869108960 tempest-ListImageFiltersTestJSON-1869108960-project-member] Lock "31e6ec86-cf5a-438c-ad8f-aad775fbb376" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1825.173237] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ff642a-4db2-4acc-9a3c-546302f9d183 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.180902] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296dda96-e964-4f7f-9552-31db026362d8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.211123] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c11dff1-bc4a-49fa-b30d-22a9a93cb58b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.218892] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e11bb31-6373-4f98-9b07-626205e7cfdd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.231963] env[61964]: DEBUG nova.compute.provider_tree [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.240754] env[61964]: DEBUG nova.scheduler.client.report [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1825.259812] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1825.260242] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1825.307033] env[61964]: DEBUG nova.compute.utils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1825.308566] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1825.308868] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1825.322281] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1825.388043] env[61964]: DEBUG nova.policy [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc0c636ea87844e79608019bb1517f2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ddab57a7ff974c078cd790e6d01968b1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1825.395273] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1825.415746] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1825.415746] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1825.415746] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1825.415746] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1825.416087] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1825.416087] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1825.416203] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1825.416361] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1825.416525] env[61964]: DEBUG nova.virt.hardware [None 
req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1825.416683] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1825.416853] env[61964]: DEBUG nova.virt.hardware [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1825.418143] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005aad0c-37b6-4eab-8aa8-357fe1a779a3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.426105] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00459082-ed26-4546-ad89-708a8970eedd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.794699] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Successfully created port: 59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1826.555090] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Successfully updated port: 59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1826.567847] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "refresh_cache-c73b38c1-53d0-4c98-814f-b6b8984bbaf5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1826.568190] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquired lock "refresh_cache-c73b38c1-53d0-4c98-814f-b6b8984bbaf5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1826.568190] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1826.626819] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 
tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1826.864248] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Updating instance_info_cache with network_info: [{"id": "59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc", "address": "fa:16:3e:2a:27:0f", "network": {"id": "7db9c589-c810-466d-81d5-37cc9a2424c4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1035911621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ddab57a7ff974c078cd790e6d01968b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c8f72a-c4", "ovs_interfaceid": "59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.875875] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Releasing lock "refresh_cache-c73b38c1-53d0-4c98-814f-b6b8984bbaf5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1826.875875] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Instance network_info: |[{"id": "59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc", "address": "fa:16:3e:2a:27:0f", "network": {"id": "7db9c589-c810-466d-81d5-37cc9a2424c4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1035911621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ddab57a7ff974c078cd790e6d01968b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c8f72a-c4", "ovs_interfaceid": "59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1826.875875] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:27:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd298db54-f13d-4bf6-b6c2-755074b3047f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1826.885111] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Creating folder: Project (ddab57a7ff974c078cd790e6d01968b1). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1826.886967] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-157e5127-afbe-440e-a83b-819e475e9553 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.898515] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Created folder: Project (ddab57a7ff974c078cd790e6d01968b1) in parent group-v351942. [ 1826.898714] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Creating folder: Instances. Parent ref: group-v351994. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1826.898961] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfbacbdb-4e2d-4daf-85af-25ad4b80a156 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.908461] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Created folder: Instances in parent group-v351994. [ 1826.908695] env[61964]: DEBUG oslo.service.loopingcall [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1826.908898] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1826.909120] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c3da319-6800-4aef-9319-cdea6808068d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.926796] env[61964]: DEBUG nova.compute.manager [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Received event network-vif-plugged-59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1826.926796] env[61964]: DEBUG oslo_concurrency.lockutils [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] Acquiring lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1826.926901] env[61964]: DEBUG oslo_concurrency.lockutils [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1826.927024] env[61964]: DEBUG oslo_concurrency.lockutils [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1826.927183] env[61964]: DEBUG nova.compute.manager [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] No waiting events found dispatching network-vif-plugged-59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1826.927335] env[61964]: WARNING nova.compute.manager [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Received unexpected event network-vif-plugged-59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc for instance with vm_state building and task_state spawning. [ 1826.927909] env[61964]: DEBUG nova.compute.manager [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Received event network-changed-59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1826.927909] env[61964]: DEBUG nova.compute.manager [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Refreshing instance network info cache due to event network-changed-59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1826.927909] env[61964]: DEBUG oslo_concurrency.lockutils [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] Acquiring lock "refresh_cache-c73b38c1-53d0-4c98-814f-b6b8984bbaf5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1826.928074] env[61964]: DEBUG oslo_concurrency.lockutils [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] Acquired lock "refresh_cache-c73b38c1-53d0-4c98-814f-b6b8984bbaf5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1826.928115] env[61964]: DEBUG nova.network.neutron [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Refreshing network info cache for port 59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1826.934630] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1826.934630] env[61964]: value = "task-1688638" [ 1826.934630] env[61964]: _type = "Task" [ 1826.934630] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.945692] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688638, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.282642] env[61964]: DEBUG nova.network.neutron [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Updated VIF entry in instance network info cache for port 59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1827.283046] env[61964]: DEBUG nova.network.neutron [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Updating instance_info_cache with network_info: [{"id": "59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc", "address": "fa:16:3e:2a:27:0f", "network": {"id": "7db9c589-c810-466d-81d5-37cc9a2424c4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1035911621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ddab57a7ff974c078cd790e6d01968b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d298db54-f13d-4bf6-b6c2-755074b3047f", "external-id": "nsx-vlan-transportzone-631", "segmentation_id": 631, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c8f72a-c4", "ovs_interfaceid": "59c8f72a-c4a6-46aa-bacc-0c83e17dd9fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.294789] env[61964]: DEBUG oslo_concurrency.lockutils [req-c93cef79-64c3-413c-867b-8f2047d2998b req-a06eeb97-895d-417a-9b3e-fa3b3b4eee7b service nova] Releasing lock "refresh_cache-c73b38c1-53d0-4c98-814f-b6b8984bbaf5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1827.445427] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688638, 'name': CreateVM_Task, 'duration_secs': 0.339496} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.445612] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1827.446308] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1827.446468] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1827.446823] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1827.447086] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-363ca8f7-a44b-4683-b6fe-b756d48b8afc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.452090] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Waiting for the task: (returnval){ [ 1827.452090] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52eb7ff0-8b85-c0b3-89cb-e72a3bda6e20" [ 1827.452090] env[61964]: _type = "Task" [ 1827.452090] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.461807] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52eb7ff0-8b85-c0b3-89cb-e72a3bda6e20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.965377] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1827.966354] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1827.967044] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1830.871517] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.132796] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "59c25eab-011e-4690-99fe-976f8dbea580" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.133223] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "59c25eab-011e-4690-99fe-976f8dbea580" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1854.227481] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "b6c97be0-e146-46b1-8d2e-085818e45835" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1854.227481] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "b6c97be0-e146-46b1-8d2e-085818e45835" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1854.693657] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Acquiring lock "9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1854.693947] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Lock "9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1854.723815] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Acquiring lock "81146564-0c00-4230-9d51-d6cfb68c9597" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1854.723815] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Lock "81146564-0c00-4230-9d51-d6cfb68c9597" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1854.761773] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Acquiring lock "a9d07839-7511-40e1-bf24-c8d83559cffe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1854.761773] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Lock "a9d07839-7511-40e1-bf24-c8d83559cffe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1858.778677] env[61964]: DEBUG oslo_concurrency.lockutils [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1858.778923] env[61964]: DEBUG oslo_concurrency.lockutils [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1858.802899] env[61964]: DEBUG oslo_concurrency.lockutils [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "9a228ad9-96ae-471c-961b-60d93c70d6c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1858.803137] env[61964]: DEBUG oslo_concurrency.lockutils [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9a228ad9-96ae-471c-961b-60d93c70d6c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1863.335168] env[61964]: DEBUG oslo_concurrency.lockutils [None req-409c2344-e23f-45f7-aad4-39a137ab723c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "f9a55f7d-b347-4d07-b98b-18178271d039" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1863.335535] env[61964]: DEBUG oslo_concurrency.lockutils [None req-409c2344-e23f-45f7-aad4-39a137ab723c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "f9a55f7d-b347-4d07-b98b-18178271d039" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1865.612416] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e4bc9351-fecf-4365-b0d0-f0a4888e5274 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "049292ed-1aab-4ea3-930b-f34822b4fb73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1865.612951] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e4bc9351-fecf-4365-b0d0-f0a4888e5274 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "049292ed-1aab-4ea3-930b-f34822b4fb73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1868.000674] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache 
{{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1868.000674] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1868.000674] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1868.029213] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1868.385981] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1869.386577] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1869.508473] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d4f910d7-1107-43f5-8a50-73aae32a9344 tempest-ServersNegativeTestJSON-817738709 tempest-ServersNegativeTestJSON-817738709-project-member] Acquiring lock "3699043f-9be3-4997-bc40-6d9bb77fbcba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1869.508741] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d4f910d7-1107-43f5-8a50-73aae32a9344 tempest-ServersNegativeTestJSON-817738709 tempest-ServersNegativeTestJSON-817738709-project-member] Lock "3699043f-9be3-4997-bc40-6d9bb77fbcba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1872.713196] env[61964]: WARNING oslo_vmware.rw_handles [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 
1872.713196] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1872.713196] env[61964]: ERROR oslo_vmware.rw_handles [ 1872.713844] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1872.715918] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1872.716714] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Copying Virtual Disk [datastore1] vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/cf155b97-8b7d-4046-98ee-dd58790aa937/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1872.717148] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d75ef10-b4e2-4e78-b1df-131e3a69c604 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.728474] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Waiting for the task: (returnval){ [ 1872.728474] env[61964]: value = "task-1688639" [ 1872.728474] env[61964]: _type = "Task" [ 1872.728474] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.739298] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Task: {'id': task-1688639, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.890441] env[61964]: DEBUG oslo_concurrency.lockutils [None req-43325e3d-4d97-40dc-b56f-04e2175cf04d tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "3fd098ca-116f-4bc3-9e39-404bf4968a66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1872.890670] env[61964]: DEBUG oslo_concurrency.lockutils [None req-43325e3d-4d97-40dc-b56f-04e2175cf04d tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "3fd098ca-116f-4bc3-9e39-404bf4968a66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1873.238174] env[61964]: DEBUG oslo_vmware.exceptions [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1873.238473] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1873.239039] env[61964]: ERROR nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1873.239039] env[61964]: Faults: ['InvalidArgument'] [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Traceback (most recent call last): [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] yield resources [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self.driver.spawn(context, instance, image_meta, [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self._fetch_image_if_missing(context, vi) [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] image_cache(vi, tmp_image_ds_loc) [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] vm_util.copy_virtual_disk( [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] session._wait_for_task(vmdk_copy_task) [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] return self.wait_for_task(task_ref) [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] return evt.wait() [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] result = hub.switch() [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] return self.greenlet.switch() [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self.f(*self.args, **self.kw) [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] raise exceptions.translate_fault(task_info.error) [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1873.239039] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Faults: ['InvalidArgument'] [ 1873.239039] 
env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] [ 1873.239942] env[61964]: INFO nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Terminating instance [ 1873.240976] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1873.241194] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1873.241439] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f492ad57-0f08-4741-b7e7-8cef30b6fa6b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.244239] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1873.244455] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1873.245188] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b677ff-e7ae-4791-b229-a300188d07d4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.249373] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1873.249570] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1873.252155] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a27cf41-4ba3-42b7-b0fa-8d49baaf7b58 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.254414] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1873.254625] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5506656-bfa4-4173-8ff9-c001c5a9c3f2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.258793] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Waiting for the task: (returnval){ [ 1873.258793] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523f94f5-a958-6001-1ffa-d5bdecc14cc8" [ 1873.258793] env[61964]: _type = "Task" [ 1873.258793] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.267216] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523f94f5-a958-6001-1ffa-d5bdecc14cc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.340277] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1873.340515] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1873.340708] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Deleting the datastore file [datastore1] 2325430a-6b1a-41d9-bc13-fd7d98e07e9e {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1873.340972] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae4cc199-f242-46ae-82f2-2b280af75980 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.348037] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Waiting for the task: (returnval){ [ 1873.348037] env[61964]: value = "task-1688641" [ 1873.348037] env[61964]: _type = "Task" [ 1873.348037] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.355752] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Task: {'id': task-1688641, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.379303] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.382921] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.383026] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.383210] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1873.769068] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1873.769384] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Creating directory with path [datastore1] vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1873.769613] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a207a2e6-f03b-4898-9529-59ad3b4bf45b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.780925] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Created directory with path [datastore1] vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1873.781085] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Fetch image to [datastore1] vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1873.781323] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1873.782168] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44894272-3d6e-4c44-8d41-e82265bcd39a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.789060] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dcb382-7869-4705-9ef3-7f8b09565451 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.798520] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb106387-ab58-4da6-b747-8d90473c992f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.830503] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-94c34111-3337-4709-8eb6-dd325e36dabf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.836646] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2975baa6-cc44-4b8c-a3a2-b80a87365987 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.857248] env[61964]: DEBUG oslo_vmware.api [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Task: {'id': task-1688641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069772} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.858614] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1873.858816] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1873.859055] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1873.859250] env[61964]: INFO nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Took 0.61 seconds to destroy the instance on the hypervisor. 
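The entries above show the recurring task pattern in this log: a vCenter task is submitted (SearchDatastore_Task, DeleteDatastoreFile_Task), the API layer logs "Waiting for the task ... to complete", polls its progress, and finally reports it "completed successfully" with a duration. The following is a minimal, hypothetical sketch of that poll-until-done loop; get_task_info, TaskInfo and the state names are illustrative stand-ins, not the real oslo.vmware or vSphere API.

# Minimal sketch of the poll-until-done loop the log entries above describe.
# NOTE: get_task_info(), TaskInfo and the state names are illustrative
# stand-ins, not the actual oslo.vmware / vSphere API.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str          # "running", "success" or "error"
    progress: int = 0   # percent complete
    error: str | None = None

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails or times out, logging progress."""
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == "success":
            return time.monotonic() - start          # the "duration_secs" in the log
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        print(f"task progress is {info.progress}%")  # mirrors the _poll_task lines
        time.sleep(interval)

# Example: a fake task that finishes on the third poll.
states = iter([TaskInfo("running", 0), TaskInfo("running", 50), TaskInfo("success", 100)])
print("duration_secs:", round(wait_for_task(lambda: next(states), interval=0.01), 3))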
[ 1873.860983] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1873.863439] env[61964]: DEBUG nova.compute.claims [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1873.864088] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1873.864088] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1873.914281] env[61964]: DEBUG oslo_vmware.rw_handles [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1873.980468] env[61964]: DEBUG oslo_vmware.rw_handles [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1873.980468] env[61964]: DEBUG oslo_vmware.rw_handles [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1874.344103] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b152a704-d699-4fa5-801d-a4467d8fb7ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.350357] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d92023f-9dbf-4f66-9eb8-8ed6adf197e4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.380172] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911acfc8-ca09-47f1-ba5a-15fe30d1b081 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.387354] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802760c7-c39c-4833-9f64-49f62603b993 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.400504] env[61964]: DEBUG nova.compute.provider_tree [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1874.408996] env[61964]: DEBUG nova.scheduler.client.report [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1874.422329] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.558s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1874.423082] env[61964]: ERROR nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1874.423082] env[61964]: Faults: ['InvalidArgument'] [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Traceback (most recent call last): [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File 
"/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self.driver.spawn(context, instance, image_meta, [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self._fetch_image_if_missing(context, vi) [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] image_cache(vi, tmp_image_ds_loc) [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] vm_util.copy_virtual_disk( [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] session._wait_for_task(vmdk_copy_task) [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] return self.wait_for_task(task_ref) [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] return evt.wait() [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] result = hub.switch() [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] return self.greenlet.switch() [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] self.f(*self.args, **self.kw) [ 1874.423082] 
env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] raise exceptions.translate_fault(task_info.error) [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Faults: ['InvalidArgument'] [ 1874.423082] env[61964]: ERROR nova.compute.manager [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] [ 1874.424023] env[61964]: DEBUG nova.compute.utils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1874.424876] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Build of instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e was re-scheduled: A specified parameter was not correct: fileType [ 1874.424876] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1874.425258] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1874.425471] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1874.425589] env[61964]: DEBUG nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1874.425745] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1874.794380] env[61964]: DEBUG nova.network.neutron [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.812750] env[61964]: INFO nova.compute.manager [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Took 0.39 seconds to deallocate network for instance. [ 1874.944058] env[61964]: INFO nova.scheduler.client.report [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Deleted allocations for instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e [ 1874.963952] env[61964]: DEBUG oslo_concurrency.lockutils [None req-06e5a533-658c-44e9-8195-3d954184d987 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 422.279s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1874.969018] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 223.539s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1874.969018] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Acquiring lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1874.969018] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 
tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1874.969018] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1874.969780] env[61964]: INFO nova.compute.manager [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Terminating instance [ 1874.972281] env[61964]: DEBUG nova.compute.manager [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1874.972699] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1874.972776] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04eda8d2-c07c-43ad-937a-6ebea21ab26a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.982514] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c656a15a-2c6b-40ba-9eb1-88c78cca9726 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.996876] env[61964]: DEBUG nova.compute.manager [None req-a1ec4d5f-0426-48a2-91f3-2cb728392611 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 26741651-12c2-4ef0-bbe4-37e981f9a7f3] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1875.019706] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2325430a-6b1a-41d9-bc13-fd7d98e07e9e could not be found. 
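The warning just above ("Instance does not exist on backend: ... InstanceNotFound") is followed in the next entries by the driver still reporting "Instance destroyed": during teardown a VM that is already gone is treated as success, not as a fatal error. A hypothetical sketch of that tolerate-not-found pattern follows; InstanceNotFound, unregister_vm and delete_instance_files are stand-ins, not nova's actual code.

# Hypothetical sketch of a destroy path that treats "not found" as success,
# mirroring the InstanceNotFound warning followed by "Instance destroyed" above.
import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for the 'instance is gone from the backend' error."""

def destroy(instance_uuid, unregister_vm, delete_instance_files):
    """Best-effort teardown: a VM that no longer exists is not an error."""
    try:
        unregister_vm(instance_uuid)
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    # Datastore files are cleaned up regardless, so a half-deleted instance
    # does not leak disk space.
    delete_instance_files(instance_uuid)
    LOG.info("Instance destroyed: %s", instance_uuid)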
[ 1875.019917] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1875.020104] env[61964]: INFO nova.compute.manager [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1875.020358] env[61964]: DEBUG oslo.service.loopingcall [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1875.020576] env[61964]: DEBUG nova.compute.manager [-] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1875.020670] env[61964]: DEBUG nova.network.neutron [-] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1875.023050] env[61964]: DEBUG nova.compute.manager [None req-a1ec4d5f-0426-48a2-91f3-2cb728392611 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 26741651-12c2-4ef0-bbe4-37e981f9a7f3] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1875.055065] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a1ec4d5f-0426-48a2-91f3-2cb728392611 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "26741651-12c2-4ef0-bbe4-37e981f9a7f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.176s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1875.066656] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1875.082108] env[61964]: DEBUG nova.network.neutron [-] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.092425] env[61964]: INFO nova.compute.manager [-] [instance: 2325430a-6b1a-41d9-bc13-fd7d98e07e9e] Took 0.07 seconds to deallocate network for instance. 
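The oslo_concurrency.lockutils lines throughout this log record, for each named lock, how long the caller waited to acquire it and how long it was then held (e.g. "waited 0.000s" and "held 200.176s" just above). Below is a minimal sketch of that wait/held instrumentation around a plain threading.Lock; the lock registry and log wording are assumptions for illustration, not lockutils internals.

# Minimal sketch of "waited N s / held N s" lock instrumentation, as in the
# oslo_concurrency.lockutils lines above. The registry is an assumption; it is
# not how lockutils is actually implemented.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name: str, owner: str):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - acquired
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Example:
with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.05)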
[ 1875.131028] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1875.131286] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1875.132844] env[61964]: INFO nova.compute.claims [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1875.195481] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2986ee7-8280-4e19-af70-f4ec0b854ee1 tempest-ServersWithSpecificFlavorTestJSON-2043036995 tempest-ServersWithSpecificFlavorTestJSON-2043036995-project-member] Lock "2325430a-6b1a-41d9-bc13-fd7d98e07e9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1875.384165] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1875.639440] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df00d8f-d70e-4151-88d3-21cd74f6d29c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.648993] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5208cb-c8ad-41c2-914c-159a17524ea1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.684390] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d22f8d9-b2fe-43b0-84ed-97ca6e6980c3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.692444] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8e93fd-e5c3-4735-ab01-c0957a3eaaea {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.705564] env[61964]: DEBUG nova.compute.provider_tree [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.720375] env[61964]: DEBUG nova.scheduler.client.report [None req-26797993-4eec-4536-916e-572f5a92cece 
tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1875.738399] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.606s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1875.738399] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1875.776098] env[61964]: DEBUG nova.compute.utils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1875.779087] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1875.779259] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1875.795212] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1875.849270] env[61964]: DEBUG nova.policy [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2021b64ab4364311a3508e21301435d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3de7a5bbddb14ddaaaf575821e68b537', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1875.867186] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1875.904704] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1875.904953] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1875.905131] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.905314] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1875.905461] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.905607] env[61964]: DEBUG 
nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1875.905812] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1875.905968] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1875.906240] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1875.906418] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1875.906588] env[61964]: DEBUG nova.virt.hardware [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1875.907908] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b098fdca-b489-4acf-b9ea-8bcc88146345 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.916864] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2017e44c-7992-4131-87d2-9e84dc97313e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.383853] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1876.535154] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Successfully created port: b084669c-5882-44bb-bf71-c2c7ea6e01c7 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1876.574102] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 
tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "63911858-5a79-4479-8c92-46afca980300" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1876.574343] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "63911858-5a79-4479-8c92-46afca980300" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1877.589814] env[61964]: DEBUG nova.compute.manager [req-5d2d4f13-eae9-4a58-8a8b-18c72ffccfdc req-69935987-beec-407a-9b21-e6aeecaef85c service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Received event network-vif-plugged-b084669c-5882-44bb-bf71-c2c7ea6e01c7 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1877.590067] env[61964]: DEBUG oslo_concurrency.lockutils [req-5d2d4f13-eae9-4a58-8a8b-18c72ffccfdc req-69935987-beec-407a-9b21-e6aeecaef85c service nova] Acquiring lock "66787186-e8c6-4700-9caf-bd7e7970b65d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1877.590251] env[61964]: DEBUG oslo_concurrency.lockutils [req-5d2d4f13-eae9-4a58-8a8b-18c72ffccfdc req-69935987-beec-407a-9b21-e6aeecaef85c service nova] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1877.590419] env[61964]: DEBUG oslo_concurrency.lockutils [req-5d2d4f13-eae9-4a58-8a8b-18c72ffccfdc req-69935987-beec-407a-9b21-e6aeecaef85c service nova] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1877.591235] env[61964]: DEBUG nova.compute.manager [req-5d2d4f13-eae9-4a58-8a8b-18c72ffccfdc req-69935987-beec-407a-9b21-e6aeecaef85c service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] No waiting events found dispatching network-vif-plugged-b084669c-5882-44bb-bf71-c2c7ea6e01c7 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1877.591485] env[61964]: WARNING nova.compute.manager [req-5d2d4f13-eae9-4a58-8a8b-18c72ffccfdc req-69935987-beec-407a-9b21-e6aeecaef85c service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Received unexpected event network-vif-plugged-b084669c-5882-44bb-bf71-c2c7ea6e01c7 for instance with vm_state building and task_state spawning. 
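The sequence above shows an external network-vif-plugged event arriving for instance 66787186-e8c6-4700-9caf-bd7e7970b65d: the per-instance "-events" lock is taken, no registered waiter is found, and the "Received unexpected event" warning is logged. The sketch below illustrates that dispatch-or-warn pattern with a hypothetical in-memory registry; ExternalEvents and its methods are assumptions, not nova's actual event classes.

# Hypothetical sketch of the "pop a waiter for this event or warn" pattern seen
# in the network-vif-plugged handling above. The registry is illustrative only.
import logging
import threading

LOG = logging.getLogger(__name__)

class ExternalEvents:
    def __init__(self):
        self._lock = threading.Lock()   # stand-in for the per-instance "<uuid>-events" lock
        self._waiters: dict[tuple[str, str], threading.Event] = {}

    def prepare_for_event(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an event before starting the operation that causes it."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid: str, event_name: str) -> None:
        """Wake a registered waiter, or warn about an unexpected event."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
        else:
            waiter.set()

# Example: the event arrives before anyone registered for it -> warning path.
ExternalEvents().dispatch("66787186-e8c6-4700-9caf-bd7e7970b65d",
                          "network-vif-plugged-b084669c-5882-44bb-bf71-c2c7ea6e01c7")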
[ 1877.614869] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Successfully updated port: b084669c-5882-44bb-bf71-c2c7ea6e01c7 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1877.631026] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "refresh_cache-66787186-e8c6-4700-9caf-bd7e7970b65d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1877.631026] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquired lock "refresh_cache-66787186-e8c6-4700-9caf-bd7e7970b65d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1877.631026] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1877.678494] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1877.936812] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Updating instance_info_cache with network_info: [{"id": "b084669c-5882-44bb-bf71-c2c7ea6e01c7", "address": "fa:16:3e:c1:1f:a0", "network": {"id": "11f2ce73-0aeb-4cab-bc38-d3979305c56e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-567226217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3de7a5bbddb14ddaaaf575821e68b537", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb084669c-58", "ovs_interfaceid": "b084669c-5882-44bb-bf71-c2c7ea6e01c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.948798] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Releasing lock "refresh_cache-66787186-e8c6-4700-9caf-bd7e7970b65d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1877.949109] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Instance network_info: |[{"id": "b084669c-5882-44bb-bf71-c2c7ea6e01c7", "address": "fa:16:3e:c1:1f:a0", "network": {"id": "11f2ce73-0aeb-4cab-bc38-d3979305c56e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-567226217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3de7a5bbddb14ddaaaf575821e68b537", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb084669c-58", "ovs_interfaceid": "b084669c-5882-44bb-bf71-c2c7ea6e01c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1877.949615] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:1f:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '419a5b3f-4c6f-4168-9def-746b4d8c5c24', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b084669c-5882-44bb-bf71-c2c7ea6e01c7', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1877.957071] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Creating folder: Project (3de7a5bbddb14ddaaaf575821e68b537). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1877.957608] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47dd3a6f-1697-46da-9d6a-5724a8219f9a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.971021] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Created folder: Project (3de7a5bbddb14ddaaaf575821e68b537) in parent group-v351942. [ 1877.971021] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Creating folder: Instances. Parent ref: group-v351997. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1877.971021] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31660a53-5169-468d-baf7-19fc9ac296e2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.982905] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Created folder: Instances in parent group-v351997. [ 1877.983140] env[61964]: DEBUG oslo.service.loopingcall [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.983320] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1877.983516] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea3ce169-7ef4-48b9-a59e-9655c8024bc2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.003061] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1878.003061] env[61964]: value = "task-1688644" [ 1878.003061] env[61964]: _type = "Task" [ 1878.003061] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.010755] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688644, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.384080] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1878.395663] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1878.395769] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1878.395916] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1878.396086] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1878.397184] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dc5aff-6f52-4256-a55a-be04fb6b3c6a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.406006] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649c3a48-36ce-4097-a213-c84d589daaec {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.422132] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385c344d-f8a1-417c-ba3c-eb0fe6cc211d {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.428038] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c00263a-9908-4ea4-8011-aff5416f77e2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.456915] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181359MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1878.457085] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1878.457285] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1878.515484] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688644, 'name': CreateVM_Task} progress is 99%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.535781] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.535956] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536116] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536249] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536391] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536516] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536627] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536741] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536854] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.536965] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1878.551292] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3453eda4-41f7-4558-a2cc-9dbce697c4e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.563301] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 30c8aea5-4f4a-42bd-adc0-d433c519b28c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.596204] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 05b4962d-2a56-40b2-a58d-9dd178160e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.610039] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ac28fb7-0325-43ee-9bb2-fac4e99a71e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.620774] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0ffde9cd-fb06-4ff5-90d7-5173178aa699 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.630710] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c067782a-36ce-4e03-888a-12a15dcd68c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.640482] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance dae54132-828d-4f84-b0d1-2b3b568882dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.650029] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 19d0d400-d8c1-4348-aef3-8de9c94af5e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.661661] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9f16e97b-269e-4a39-8816-ee1a4d911450 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.671962] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.682538] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.693535] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.705630] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.716438] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 81146564-0c00-4230-9d51-d6cfb68c9597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.728338] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a9d07839-7511-40e1-bf24-c8d83559cffe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.738592] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.749030] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9a228ad9-96ae-471c-961b-60d93c70d6c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.758372] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f9a55f7d-b347-4d07-b98b-18178271d039 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.768671] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 049292ed-1aab-4ea3-930b-f34822b4fb73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.778953] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3699043f-9be3-4997-bc40-6d9bb77fbcba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.789217] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3fd098ca-116f-4bc3-9e39-404bf4968a66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.799846] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1878.800112] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1878.800260] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1879.015571] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688644, 'name': CreateVM_Task, 'duration_secs': 0.580919} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.015741] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1879.016535] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1879.016705] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1879.017030] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1879.017397] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-893cd7c7-b18a-443d-9d5f-008744c30952 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.024943] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Waiting for the task: (returnval){ [ 1879.024943] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523c8063-89bc-c730-89de-11fc91874062" [ 1879.024943] env[61964]: _type = "Task" [ 1879.024943] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.033110] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523c8063-89bc-c730-89de-11fc91874062, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.178632] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4def32e-1039-4bf0-b14e-0eb6265d2af8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.186281] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c352916-aa63-49a8-9101-fdc59806957c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.215325] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7cca5e-dd6a-4645-847c-f88651669287 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.222522] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34a7d6a-d9de-4ce4-a2fc-856b8824486f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.236376] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.245362] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1879.258707] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1879.258950] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.802s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1879.535171] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Releasing lock "[datastore1] 
devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1879.535443] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1879.535682] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1879.620311] env[61964]: DEBUG nova.compute.manager [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Received event network-changed-b084669c-5882-44bb-bf71-c2c7ea6e01c7 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1879.620668] env[61964]: DEBUG nova.compute.manager [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Refreshing instance network info cache due to event network-changed-b084669c-5882-44bb-bf71-c2c7ea6e01c7. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1879.620861] env[61964]: DEBUG oslo_concurrency.lockutils [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] Acquiring lock "refresh_cache-66787186-e8c6-4700-9caf-bd7e7970b65d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1879.620861] env[61964]: DEBUG oslo_concurrency.lockutils [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] Acquired lock "refresh_cache-66787186-e8c6-4700-9caf-bd7e7970b65d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1879.620981] env[61964]: DEBUG nova.network.neutron [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Refreshing network info cache for port b084669c-5882-44bb-bf71-c2c7ea6e01c7 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1879.943155] env[61964]: DEBUG nova.network.neutron [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Updated VIF entry in instance network info cache for port b084669c-5882-44bb-bf71-c2c7ea6e01c7. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1879.943562] env[61964]: DEBUG nova.network.neutron [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Updating instance_info_cache with network_info: [{"id": "b084669c-5882-44bb-bf71-c2c7ea6e01c7", "address": "fa:16:3e:c1:1f:a0", "network": {"id": "11f2ce73-0aeb-4cab-bc38-d3979305c56e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-567226217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3de7a5bbddb14ddaaaf575821e68b537", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "419a5b3f-4c6f-4168-9def-746b4d8c5c24", "external-id": "nsx-vlan-transportzone-656", "segmentation_id": 656, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb084669c-58", "ovs_interfaceid": "b084669c-5882-44bb-bf71-c2c7ea6e01c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.953452] env[61964]: DEBUG oslo_concurrency.lockutils [req-6639cd3a-b593-4434-a1e3-b91a32854899 req-a3c7e82d-1f3b-4c02-8ab9-f48af9685059 service nova] Releasing lock "refresh_cache-66787186-e8c6-4700-9caf-bd7e7970b65d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1881.124274] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "66787186-e8c6-4700-9caf-bd7e7970b65d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1885.254203] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1892.849016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-355765b5-a69a-4608-886c-0b96d103db89 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] Acquiring lock "c68c1fe4-ef07-4bb7-b9be-16e02d0bd855" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1892.849418] env[61964]: DEBUG oslo_concurrency.lockutils [None req-355765b5-a69a-4608-886c-0b96d103db89 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] Lock "c68c1fe4-ef07-4bb7-b9be-16e02d0bd855" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1908.852939] env[61964]: DEBUG oslo_concurrency.lockutils [None req-70b27178-b569-4d03-845f-529b82a14ddb tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] Acquiring lock "0dad08e5-1725-4e1e-98f1-068da1f9edcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1908.853266] env[61964]: DEBUG oslo_concurrency.lockutils [None req-70b27178-b569-4d03-845f-529b82a14ddb tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] Lock "0dad08e5-1725-4e1e-98f1-068da1f9edcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1909.380978] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dc1bdcd5-d243-4280-9725-4cf31043d27d tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] Acquiring lock "fa5eb87f-8546-4e29-b3d2-0e898d113beb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1909.381213] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dc1bdcd5-d243-4280-9725-4cf31043d27d tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] Lock "fa5eb87f-8546-4e29-b3d2-0e898d113beb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1912.493031] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2c7bf34-e1f1-4f64-a2bd-fd52a4794cbe tempest-ServerShowV254Test-650985770 tempest-ServerShowV254Test-650985770-project-member] Acquiring lock "22d0050d-4654-4e63-ae79-bc209d714635" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1912.498575] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2c7bf34-e1f1-4f64-a2bd-fd52a4794cbe tempest-ServerShowV254Test-650985770 tempest-ServerShowV254Test-650985770-project-member] Lock "22d0050d-4654-4e63-ae79-bc209d714635" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.006s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1919.741341] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f5210043-641c-4387-9b11-7aa883579e93 tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Acquiring lock "149b16e3-d4d7-48c8-a7e4-32d869e82615" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1919.741658] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f5210043-641c-4387-9b11-7aa883579e93 
tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Lock "149b16e3-d4d7-48c8-a7e4-32d869e82615" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1920.700275] env[61964]: WARNING oslo_vmware.rw_handles [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1920.700275] env[61964]: ERROR oslo_vmware.rw_handles [ 1920.700275] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1920.701133] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1920.701510] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Copying Virtual Disk [datastore1] vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/8c26a4b7-9371-497e-8be3-6d670e125dd4/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1920.701916] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53b50a99-c9d1-40ab-b880-50e0d29c5c48 {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.714024] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Waiting for the task: (returnval){ [ 1920.714024] env[61964]: value = "task-1688645" [ 1920.714024] env[61964]: _type = "Task" [ 1920.714024] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.721051] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Task: {'id': task-1688645, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.223075] env[61964]: DEBUG oslo_vmware.exceptions [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1921.223368] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1921.223972] env[61964]: ERROR nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1921.223972] env[61964]: Faults: ['InvalidArgument'] [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Traceback (most recent call last): [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] yield resources [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self.driver.spawn(context, instance, image_meta, [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: 
ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self._fetch_image_if_missing(context, vi) [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] image_cache(vi, tmp_image_ds_loc) [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] vm_util.copy_virtual_disk( [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] session._wait_for_task(vmdk_copy_task) [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] return self.wait_for_task(task_ref) [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] return evt.wait() [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] result = hub.switch() [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] return self.greenlet.switch() [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self.f(*self.args, **self.kw) [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] raise exceptions.translate_fault(task_info.error) [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Faults: ['InvalidArgument'] [ 1921.223972] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] [ 1921.224857] env[61964]: INFO nova.compute.manager 
[None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Terminating instance [ 1921.225864] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1921.226096] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1921.226335] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69477393-e81d-4938-a0df-8beb9a5747ba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.228749] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1921.228941] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1921.229748] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28a26c0-dc86-44bf-9fbd-64933baa89fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.237113] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1921.237341] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ac004ab-0f74-45d7-ab26-6794b3ea71ee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.239717] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1921.239896] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1921.240856] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5c31d98-be3e-4ccf-a734-f56038e7fe6b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.245550] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for the task: (returnval){ [ 1921.245550] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9ca82-eb2e-2183-14ff-2a0ab977d83a" [ 1921.245550] env[61964]: _type = "Task" [ 1921.245550] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.258300] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9ca82-eb2e-2183-14ff-2a0ab977d83a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.311465] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1921.311698] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1921.311888] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Deleting the datastore file [datastore1] ebcc4aaa-3506-4b4f-80da-532c3f7bb891 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1921.312178] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe784103-f4cd-44b6-afec-bcc36d7a3406 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.318984] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Waiting for the task: (returnval){ [ 1921.318984] env[61964]: value = "task-1688647" [ 1921.318984] env[61964]: _type = "Task" [ 1921.318984] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.327574] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Task: {'id': task-1688647, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.756800] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1921.757142] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Creating directory with path [datastore1] vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1921.757450] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1952f23-079b-4564-a9e0-600612cfd4a1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.769454] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Created directory with path [datastore1] vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1921.769696] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Fetch image to [datastore1] vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1921.769917] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1921.770766] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6437c0d7-2a17-41a5-a4b6-ab726f41ef6e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.779846] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cca0dc-1383-4b47-9ad0-8a544db2a8ff {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.791470] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be886aee-a6b8-440c-ad19-8c488fa8ce7b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.829831] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3752df60-712c-4cbc-ac60-8cd0a377cede {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.837189] env[61964]: DEBUG oslo_vmware.api [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Task: {'id': task-1688647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095064} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.838722] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1921.838938] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1921.839123] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1921.839292] env[61964]: INFO nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Took 0.61 seconds to destroy the instance on the hypervisor. 
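The DeleteDatastoreFile_Task records above show the usual oslo.vmware shape: a *_Task method is invoked, the API logs "Waiting for the task ... to complete", progress is polled until the task reports completion, and a task error is turned into a raised fault (as in the earlier copy_virtual_disk traceback). The sketch below illustrates that invoke-then-poll loop in generic terms only; the poll_task callable, the state names and the TaskFailed exception are invented for the example and are not the oslo.vmware API.

# Illustrative only: a generic invoke-then-poll loop in the spirit of the
# "Waiting for the task ... to complete" / "progress is N%" lines above.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state (sketch only)."""


def wait_for_task(poll_task, interval=0.5):
    """Poll poll_task() until it reports success or error.

    poll_task must return a dict like
    {"state": "running", "progress": 40, "result": None, "error": None}.
    """
    while True:
        info = poll_task()
        state = info["state"]
        if state in ("queued", "running"):
            # Mirrors the "progress is 0%" style lines in the log.
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(interval)
            continue
        if state == "success":
            return info.get("result")
        # Any other state is treated as a failure, the same way a task error
        # in the log is surfaced as a raised exception.
        raise TaskFailed(info.get("error") or "task ended in state %s" % state)

A caller would pass a closure that queries the real task object and returns its current state, and handle TaskFailed the same way the log handles the VimFaultException raised from the failed disk copy.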
[ 1921.841454] env[61964]: DEBUG nova.compute.claims [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1921.841622] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1921.841851] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1921.844480] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d1fa35d1-4870-4536-b763-747b2987d320 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.866595] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1921.939386] env[61964]: DEBUG oslo_vmware.rw_handles [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1922.001907] env[61964]: DEBUG oslo_vmware.rw_handles [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1922.002107] env[61964]: DEBUG oslo_vmware.rw_handles [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1922.364835] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba62be73-c575-4349-a3ae-97d9b24f5652 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.375384] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6c2986-1ca7-4a66-9b5a-45f94c3c593f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.410597] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3003da04-e53f-4189-b4d2-93f6ca3deec0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.418970] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42e1ed3-d858-4066-a1fa-86e8c0123702 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.433331] env[61964]: DEBUG nova.compute.provider_tree [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1922.442491] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e9299c25-241d-4165-9440-9988c42982b6 tempest-ServersNegativeTestMultiTenantJSON-853977313 tempest-ServersNegativeTestMultiTenantJSON-853977313-project-member] Acquiring lock "720f4e4b-295e-4a1d-af1e-bfa6739844c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1922.442699] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e9299c25-241d-4165-9440-9988c42982b6 tempest-ServersNegativeTestMultiTenantJSON-853977313 tempest-ServersNegativeTestMultiTenantJSON-853977313-project-member] Lock "720f4e4b-295e-4a1d-af1e-bfa6739844c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1922.447060] env[61964]: DEBUG nova.scheduler.client.report [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1922.460396] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.618s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1922.460940] env[61964]: ERROR nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1922.460940] env[61964]: Faults: ['InvalidArgument'] [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Traceback (most recent call last): [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self.driver.spawn(context, instance, image_meta, [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self._fetch_image_if_missing(context, vi) [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] image_cache(vi, tmp_image_ds_loc) [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] vm_util.copy_virtual_disk( [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] session._wait_for_task(vmdk_copy_task) [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] return self.wait_for_task(task_ref) [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] return evt.wait() [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] result = hub.switch() [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] return self.greenlet.switch() [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] self.f(*self.args, **self.kw) [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] raise exceptions.translate_fault(task_info.error) [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Faults: ['InvalidArgument'] [ 1922.460940] env[61964]: ERROR nova.compute.manager [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] [ 1922.462022] env[61964]: DEBUG nova.compute.utils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1922.463439] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Build of instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 was re-scheduled: A specified parameter was not correct: fileType [ 1922.463439] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1922.463874] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1922.464100] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1922.464313] env[61964]: DEBUG nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1922.464507] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1922.824766] env[61964]: DEBUG nova.network.neutron [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.835074] env[61964]: INFO nova.compute.manager [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Took 0.37 seconds to deallocate network for instance. [ 1922.944618] env[61964]: INFO nova.scheduler.client.report [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Deleted allocations for instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 [ 1922.965355] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e66ae6ac-e786-4647-848c-5169395fb4b4 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 469.003s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1922.967211] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 271.526s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1922.967629] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Acquiring lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1922.969792] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1922.969792] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1922.970611] env[61964]: INFO nova.compute.manager [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Terminating instance [ 1922.972359] env[61964]: DEBUG nova.compute.manager [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1922.972544] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1922.973113] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24b9edac-218a-4fb4-84f6-cac0134822cc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.982187] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e457a5a0-6e5b-498f-b6fe-880d127f34df {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.994262] env[61964]: DEBUG nova.compute.manager [None req-8defa999-b2a5-4e88-a6d8-f99dabcae3c7 tempest-InstanceActionsV221TestJSON-1256052157 tempest-InstanceActionsV221TestJSON-1256052157-project-member] [instance: 3453eda4-41f7-4558-a2cc-9dbce697c4e5] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.019068] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ebcc4aaa-3506-4b4f-80da-532c3f7bb891 could not be found. 
[ 1923.019068] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1923.019068] env[61964]: INFO nova.compute.manager [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1923.019068] env[61964]: DEBUG oslo.service.loopingcall [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1923.019068] env[61964]: DEBUG nova.compute.manager [-] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1923.019068] env[61964]: DEBUG nova.network.neutron [-] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1923.024131] env[61964]: DEBUG nova.compute.manager [None req-8defa999-b2a5-4e88-a6d8-f99dabcae3c7 tempest-InstanceActionsV221TestJSON-1256052157 tempest-InstanceActionsV221TestJSON-1256052157-project-member] [instance: 3453eda4-41f7-4558-a2cc-9dbce697c4e5] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.049096] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8defa999-b2a5-4e88-a6d8-f99dabcae3c7 tempest-InstanceActionsV221TestJSON-1256052157 tempest-InstanceActionsV221TestJSON-1256052157-project-member] Lock "3453eda4-41f7-4558-a2cc-9dbce697c4e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.498s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.049276] env[61964]: DEBUG nova.network.neutron [-] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.060512] env[61964]: DEBUG nova.compute.manager [None req-fcc9efb1-aa91-4c46-b085-6622bc8531bf tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] [instance: 30c8aea5-4f4a-42bd-adc0-d433c519b28c] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.063566] env[61964]: INFO nova.compute.manager [-] [instance: ebcc4aaa-3506-4b4f-80da-532c3f7bb891] Took 0.04 seconds to deallocate network for instance. [ 1923.087762] env[61964]: DEBUG nova.compute.manager [None req-fcc9efb1-aa91-4c46-b085-6622bc8531bf tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] [instance: 30c8aea5-4f4a-42bd-adc0-d433c519b28c] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.113646] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fcc9efb1-aa91-4c46-b085-6622bc8531bf tempest-AttachVolumeTestJSON-1046673496 tempest-AttachVolumeTestJSON-1046673496-project-member] Lock "30c8aea5-4f4a-42bd-adc0-d433c519b28c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.301s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.124439] env[61964]: DEBUG nova.compute.manager [None req-51e51f31-7000-4235-b2ba-31f3da799a4c tempest-ServerPasswordTestJSON-1096031617 tempest-ServerPasswordTestJSON-1096031617-project-member] [instance: 05b4962d-2a56-40b2-a58d-9dd178160e3a] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.149373] env[61964]: DEBUG nova.compute.manager [None req-51e51f31-7000-4235-b2ba-31f3da799a4c tempest-ServerPasswordTestJSON-1096031617 tempest-ServerPasswordTestJSON-1096031617-project-member] [instance: 05b4962d-2a56-40b2-a58d-9dd178160e3a] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.170023] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b5c206db-9cfc-40e1-9f8a-7aed7b149775 tempest-ServersTestFqdnHostnames-1431614773 tempest-ServersTestFqdnHostnames-1431614773-project-member] Lock "ebcc4aaa-3506-4b4f-80da-532c3f7bb891" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.203s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.179210] env[61964]: DEBUG oslo_concurrency.lockutils [None req-51e51f31-7000-4235-b2ba-31f3da799a4c tempest-ServerPasswordTestJSON-1096031617 tempest-ServerPasswordTestJSON-1096031617-project-member] Lock "05b4962d-2a56-40b2-a58d-9dd178160e3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.425s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.187834] env[61964]: DEBUG nova.compute.manager [None req-2a01269d-4a8c-4e14-87e0-a34f26ea1472 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] [instance: 0ac28fb7-0325-43ee-9bb2-fac4e99a71e1] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.212210] env[61964]: DEBUG nova.compute.manager [None req-2a01269d-4a8c-4e14-87e0-a34f26ea1472 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] [instance: 0ac28fb7-0325-43ee-9bb2-fac4e99a71e1] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.234313] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2a01269d-4a8c-4e14-87e0-a34f26ea1472 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] Lock "0ac28fb7-0325-43ee-9bb2-fac4e99a71e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.365s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.244404] env[61964]: DEBUG nova.compute.manager [None req-bb664791-d73d-4679-b21e-8ce4644cc9f7 tempest-ServerMetadataTestJSON-2041899630 tempest-ServerMetadataTestJSON-2041899630-project-member] [instance: 0ffde9cd-fb06-4ff5-90d7-5173178aa699] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.269542] env[61964]: DEBUG nova.compute.manager [None req-bb664791-d73d-4679-b21e-8ce4644cc9f7 tempest-ServerMetadataTestJSON-2041899630 tempest-ServerMetadataTestJSON-2041899630-project-member] [instance: 0ffde9cd-fb06-4ff5-90d7-5173178aa699] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.293343] env[61964]: DEBUG oslo_concurrency.lockutils [None req-bb664791-d73d-4679-b21e-8ce4644cc9f7 tempest-ServerMetadataTestJSON-2041899630 tempest-ServerMetadataTestJSON-2041899630-project-member] Lock "0ffde9cd-fb06-4ff5-90d7-5173178aa699" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.617s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.302695] env[61964]: DEBUG nova.compute.manager [None req-6094d48a-2f25-4259-96e9-492b451d5676 tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] [instance: c067782a-36ce-4e03-888a-12a15dcd68c3] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.327966] env[61964]: DEBUG nova.compute.manager [None req-6094d48a-2f25-4259-96e9-492b451d5676 tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] [instance: c067782a-36ce-4e03-888a-12a15dcd68c3] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.358689] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6094d48a-2f25-4259-96e9-492b451d5676 tempest-VolumesAdminNegativeTest-1802963962 tempest-VolumesAdminNegativeTest-1802963962-project-member] Lock "c067782a-36ce-4e03-888a-12a15dcd68c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.700s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.370319] env[61964]: DEBUG nova.compute.manager [None req-dbcc23d1-078b-41ec-b6cf-0175f061dc4b tempest-ServerActionsV293TestJSON-1598391829 tempest-ServerActionsV293TestJSON-1598391829-project-member] [instance: dae54132-828d-4f84-b0d1-2b3b568882dc] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.400892] env[61964]: DEBUG nova.compute.manager [None req-dbcc23d1-078b-41ec-b6cf-0175f061dc4b tempest-ServerActionsV293TestJSON-1598391829 tempest-ServerActionsV293TestJSON-1598391829-project-member] [instance: dae54132-828d-4f84-b0d1-2b3b568882dc] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.424757] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dbcc23d1-078b-41ec-b6cf-0175f061dc4b tempest-ServerActionsV293TestJSON-1598391829 tempest-ServerActionsV293TestJSON-1598391829-project-member] Lock "dae54132-828d-4f84-b0d1-2b3b568882dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.512s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.433724] env[61964]: DEBUG nova.compute.manager [None req-3cb9db90-2e0a-46d6-a9db-aeb009c1e856 tempest-ServerTagsTestJSON-1436814176 tempest-ServerTagsTestJSON-1436814176-project-member] [instance: 19d0d400-d8c1-4348-aef3-8de9c94af5e2] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.463390] env[61964]: DEBUG nova.compute.manager [None req-3cb9db90-2e0a-46d6-a9db-aeb009c1e856 tempest-ServerTagsTestJSON-1436814176 tempest-ServerTagsTestJSON-1436814176-project-member] [instance: 19d0d400-d8c1-4348-aef3-8de9c94af5e2] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.485220] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3cb9db90-2e0a-46d6-a9db-aeb009c1e856 tempest-ServerTagsTestJSON-1436814176 tempest-ServerTagsTestJSON-1436814176-project-member] Lock "19d0d400-d8c1-4348-aef3-8de9c94af5e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.671s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.495044] env[61964]: DEBUG nova.compute.manager [None req-eefbee43-8dd7-48e4-9688-b8c484edbcff tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] [instance: 9f16e97b-269e-4a39-8816-ee1a4d911450] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.516634] env[61964]: DEBUG nova.compute.manager [None req-eefbee43-8dd7-48e4-9688-b8c484edbcff tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] [instance: 9f16e97b-269e-4a39-8816-ee1a4d911450] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1923.539678] env[61964]: DEBUG oslo_concurrency.lockutils [None req-eefbee43-8dd7-48e4-9688-b8c484edbcff tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Lock "9f16e97b-269e-4a39-8816-ee1a4d911450" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.489s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1923.548806] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1923.600421] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1923.600666] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1923.602266] env[61964]: INFO nova.compute.claims [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1923.973981] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483a9fc4-33b3-4ef2-aa6f-f69dbd0dcde8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.981412] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16894d6c-ffe2-4e9b-aeb6-a976df902c10 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.010105] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1140f2ba-2f2a-4772-bb7c-79832048643b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.016785] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5d3cb1-e516-4513-b8f1-a9f6e1428084 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.030105] env[61964]: DEBUG nova.compute.provider_tree [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1924.037853] env[61964]: DEBUG nova.scheduler.client.report [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1924.052954] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.452s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1924.053214] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1924.088330] env[61964]: DEBUG nova.compute.utils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1924.089967] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1924.090179] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1924.098942] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1924.151214] env[61964]: DEBUG nova.policy [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5f681afe3064ba9a23d968c38e0c578', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f33639e253b8461cbcfc48e472befc2b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1924.184470] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1924.218442] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1924.218748] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1924.218937] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1924.219672] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1924.219852] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1924.220057] env[61964]: DEBUG 
nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1924.220421] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1924.220561] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1924.220774] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1924.220961] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1924.221159] env[61964]: DEBUG nova.virt.hardware [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1924.222773] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f64e499-8150-4c0b-852f-2241727ff84b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.230802] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1289bd70-408b-4e68-8693-04c7349caadc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.642750] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Successfully created port: 37e96281-e7a9-42e6-998a-0cb07b1afe6c {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1925.434857] env[61964]: DEBUG nova.compute.manager [req-5e580802-33dd-47f1-bba5-f31ef1fff78e req-e89708e8-4d61-40ea-8137-6b764cc098dd service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Received event network-vif-plugged-37e96281-e7a9-42e6-998a-0cb07b1afe6c {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1925.435171] env[61964]: DEBUG oslo_concurrency.lockutils [req-5e580802-33dd-47f1-bba5-f31ef1fff78e req-e89708e8-4d61-40ea-8137-6b764cc098dd 
service nova] Acquiring lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1925.435279] env[61964]: DEBUG oslo_concurrency.lockutils [req-5e580802-33dd-47f1-bba5-f31ef1fff78e req-e89708e8-4d61-40ea-8137-6b764cc098dd service nova] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1925.435443] env[61964]: DEBUG oslo_concurrency.lockutils [req-5e580802-33dd-47f1-bba5-f31ef1fff78e req-e89708e8-4d61-40ea-8137-6b764cc098dd service nova] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1925.436257] env[61964]: DEBUG nova.compute.manager [req-5e580802-33dd-47f1-bba5-f31ef1fff78e req-e89708e8-4d61-40ea-8137-6b764cc098dd service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] No waiting events found dispatching network-vif-plugged-37e96281-e7a9-42e6-998a-0cb07b1afe6c {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1925.436257] env[61964]: WARNING nova.compute.manager [req-5e580802-33dd-47f1-bba5-f31ef1fff78e req-e89708e8-4d61-40ea-8137-6b764cc098dd service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Received unexpected event network-vif-plugged-37e96281-e7a9-42e6-998a-0cb07b1afe6c for instance with vm_state building and task_state spawning. 
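The network-vif-plugged records above show Nova's external-event handshake with Neutron: the compute manager keeps per-instance event waiters (guarded by the instance "-events" lock), and when Neutron reports the port as plugged the matching waiter is popped and signalled; if nothing has registered a waiter yet, the notification is only logged, which is exactly the "No waiting events found" / "Received unexpected event" pair seen here. The sketch below is a minimal illustration of that pattern, assuming a plain dict of threading.Event objects; the function names and structure are invented for the example and are not nova.compute.manager's InstanceEvents API.

# Illustrative only: a register-then-dispatch waiter table in the spirit of
# the "network-vif-plugged" handling above.
import threading

_waiters = {}                 # (instance_uuid, event_name) -> threading.Event
_waiters_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    """Register interest in an external event before starting the action."""
    event = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = event
    return event


def dispatch_event(instance_uuid, event_name):
    """Called when the external notification (e.g. from Neutron) arrives."""
    with _waiters_lock:
        event = _waiters.pop((instance_uuid, event_name), None)
    if event is None:
        # Matches the "No waiting events found dispatching ..." /
        # "Received unexpected event ..." situation in the log: no waiter was
        # registered yet, so the notification is only reported.
        print("Received unexpected event %s for instance %s"
              % (event_name, instance_uuid))
        return
    event.set()

The spawning side would call prepare_for_event() before asking Neutron to plug the interface and then wait on the returned Event; in the log the notification arrived while the instance was still building, before any waiter existed, hence the warning.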
[ 1925.533628] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Successfully updated port: 37e96281-e7a9-42e6-998a-0cb07b1afe6c {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1925.549810] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "refresh_cache-5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1925.550022] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquired lock "refresh_cache-5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1925.550125] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1925.607206] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1925.794784] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Updating instance_info_cache with network_info: [{"id": "37e96281-e7a9-42e6-998a-0cb07b1afe6c", "address": "fa:16:3e:81:28:c6", "network": {"id": "7a5b224c-9582-456f-a965-c394f45c3be6", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-885685629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f33639e253b8461cbcfc48e472befc2b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37e96281-e7", "ovs_interfaceid": "37e96281-e7a9-42e6-998a-0cb07b1afe6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.806072] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Releasing lock "refresh_cache-5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1925.806353] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Instance network_info: |[{"id": "37e96281-e7a9-42e6-998a-0cb07b1afe6c", "address": "fa:16:3e:81:28:c6", "network": {"id": "7a5b224c-9582-456f-a965-c394f45c3be6", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-885685629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f33639e253b8461cbcfc48e472befc2b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37e96281-e7", "ovs_interfaceid": "37e96281-e7a9-42e6-998a-0cb07b1afe6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1925.806728] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:28:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37e96281-e7a9-42e6-998a-0cb07b1afe6c', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1925.814338] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Creating folder: Project (f33639e253b8461cbcfc48e472befc2b). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1925.814838] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed8b21f4-e102-4e62-81bc-ce778475bc84 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.824827] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Created folder: Project (f33639e253b8461cbcfc48e472befc2b) in parent group-v351942. [ 1925.825014] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Creating folder: Instances. Parent ref: group-v352000. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1925.825217] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7a32f9d-94af-4844-8824-ae1de7742ba8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.833384] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Created folder: Instances in parent group-v352000. [ 1925.833592] env[61964]: DEBUG oslo.service.loopingcall [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1925.833757] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1925.833936] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d812dc4c-d9e5-4ff1-8f92-586c3713ddfa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.851836] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1925.851836] env[61964]: value = "task-1688650" [ 1925.851836] env[61964]: _type = "Task" [ 1925.851836] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.860731] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688650, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.362211] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688650, 'name': CreateVM_Task, 'duration_secs': 0.447649} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.362390] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1926.363048] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1926.363217] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1926.363540] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1926.363784] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e29db84-cf9d-4d16-b937-0571ee4febe0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.368223] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Waiting for the task: (returnval){ [ 1926.368223] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e09af6-8fe9-222e-d5e4-ab22791ab2fa" [ 1926.368223] env[61964]: _type = "Task" [ 1926.368223] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.376461] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e09af6-8fe9-222e-d5e4-ab22791ab2fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.878936] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1926.879236] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1926.879416] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1927.383712] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1927.383918] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1927.383981] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1927.408614] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.408862] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.408981] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.409178] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.409328] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.409455] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.409593] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.409734] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.409889] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.410029] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1927.410160] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1927.462143] env[61964]: DEBUG nova.compute.manager [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Received event network-changed-37e96281-e7a9-42e6-998a-0cb07b1afe6c {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1927.462143] env[61964]: DEBUG nova.compute.manager [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Refreshing instance network info cache due to event network-changed-37e96281-e7a9-42e6-998a-0cb07b1afe6c. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1927.462143] env[61964]: DEBUG oslo_concurrency.lockutils [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] Acquiring lock "refresh_cache-5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1927.462143] env[61964]: DEBUG oslo_concurrency.lockutils [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] Acquired lock "refresh_cache-5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1927.462143] env[61964]: DEBUG nova.network.neutron [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Refreshing network info cache for port 37e96281-e7a9-42e6-998a-0cb07b1afe6c {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1927.789993] env[61964]: DEBUG nova.network.neutron [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Updated VIF entry in instance network info cache for port 37e96281-e7a9-42e6-998a-0cb07b1afe6c. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1927.790368] env[61964]: DEBUG nova.network.neutron [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Updating instance_info_cache with network_info: [{"id": "37e96281-e7a9-42e6-998a-0cb07b1afe6c", "address": "fa:16:3e:81:28:c6", "network": {"id": "7a5b224c-9582-456f-a965-c394f45c3be6", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-885685629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f33639e253b8461cbcfc48e472befc2b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37e96281-e7", "ovs_interfaceid": "37e96281-e7a9-42e6-998a-0cb07b1afe6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.799904] env[61964]: DEBUG oslo_concurrency.lockutils [req-d3adeda0-0bac-46b4-84bf-c31a7a0b6952 req-f2270759-4a9e-427b-89aa-f6bf5cd81154 service nova] Releasing lock "refresh_cache-5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1930.384519] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1930.384519] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1933.383784] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1935.378671] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1935.383390] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1935.383565] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1936.384504] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1936.384799] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1940.383553] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1940.395558] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1940.395828] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1940.396061] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1940.396454] env[61964]: DEBUG nova.compute.resource_tracker [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1940.397393] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b703f142-2a35-4b47-92e4-08fc50f6f720 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.406409] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f7e6e9-14d5-4b80-8a11-93523d1a9cd0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.420539] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3f55d8-a56b-42dd-9340-92d2b4705d1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.426983] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a4283e-e560-422a-8103-079a37db5158 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.457707] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1940.457707] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1940.457707] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1940.534010] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.534222] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.534351] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.534473] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.534591] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.534706] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.534821] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.534935] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.535059] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.535177] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.547034] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.557709] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.568667] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.578009] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 81146564-0c00-4230-9d51-d6cfb68c9597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.587361] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a9d07839-7511-40e1-bf24-c8d83559cffe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.596719] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.610352] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9a228ad9-96ae-471c-961b-60d93c70d6c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.620097] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f9a55f7d-b347-4d07-b98b-18178271d039 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.629831] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 049292ed-1aab-4ea3-930b-f34822b4fb73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.639201] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3699043f-9be3-4997-bc40-6d9bb77fbcba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.649222] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3fd098ca-116f-4bc3-9e39-404bf4968a66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.658303] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.667345] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c68c1fe4-ef07-4bb7-b9be-16e02d0bd855 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.676115] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0dad08e5-1725-4e1e-98f1-068da1f9edcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.685377] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance fa5eb87f-8546-4e29-b3d2-0e898d113beb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.694984] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 22d0050d-4654-4e63-ae79-bc209d714635 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.704737] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 149b16e3-d4d7-48c8-a7e4-32d869e82615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.713483] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 720f4e4b-295e-4a1d-af1e-bfa6739844c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1940.713713] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1940.713859] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1941.009171] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90a3931-7fb7-43a4-b60d-ef99231a669b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.016298] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86901d4a-26a0-41a9-8122-55bc87b51d1e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.045081] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3645d147-fd1c-4514-9065-31378b91a13b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.051736] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64169444-42a7-48bf-9126-7609609d8d0c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.064066] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider 
c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1941.088212] env[61964]: ERROR nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [req-801b29c8-efb6-4633-80b4-5f9ac2e759bc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c64b88bc-0cc7-41f7-af90-1e96b384d8a5. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-801b29c8-efb6-4633-80b4-5f9ac2e759bc"}]} [ 1941.103510] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1941.116271] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1941.116446] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1941.129018] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1941.147144] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: 
COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1941.465019] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c70c802-51e0-4973-87f1-b43bba111d12 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.470908] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3232f8-8907-4c18-9f91-281e0e13e4f2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.499510] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346199a2-b41f-4830-b64c-d73a3c980434 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.506926] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a959c89-8586-43dc-a847-597afe8aca1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.522022] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1941.559024] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updated inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 1941.559260] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 generation from 72 to 73 during operation: update_inventory {{(pid=61964) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1941.559413] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1941.575020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1941.575020] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.117s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1953.238183] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Acquiring lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1953.238539] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1970.712362] env[61964]: WARNING oslo_vmware.rw_handles [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1970.712362] env[61964]: ERROR oslo_vmware.rw_handles [ 1970.713104] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to 
vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1970.714707] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1970.714978] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Copying Virtual Disk [datastore1] vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/4def83ea-1e06-487e-bec2-7ca95c3c6c30/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1970.715247] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3ebeb3e-22a0-47d8-a47d-90bcd24c9015 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.723580] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for the task: (returnval){ [ 1970.723580] env[61964]: value = "task-1688661" [ 1970.723580] env[61964]: _type = "Task" [ 1970.723580] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.731890] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': task-1688661, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.233993] env[61964]: DEBUG oslo_vmware.exceptions [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1971.234308] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1971.234864] env[61964]: ERROR nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1971.234864] env[61964]: Faults: ['InvalidArgument'] [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Traceback (most recent call last): [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] yield resources [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self.driver.spawn(context, instance, image_meta, [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self._fetch_image_if_missing(context, vi) [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] image_cache(vi, tmp_image_ds_loc) [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] vm_util.copy_virtual_disk( [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] session._wait_for_task(vmdk_copy_task) [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] return self.wait_for_task(task_ref) [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] return evt.wait() [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] result = hub.switch() [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] return self.greenlet.switch() [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self.f(*self.args, **self.kw) [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] raise exceptions.translate_fault(task_info.error) [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Faults: ['InvalidArgument'] [ 1971.234864] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] [ 1971.235994] env[61964]: INFO nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Terminating instance [ 1971.237067] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1971.237067] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1971.237216] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb2a92b4-f3fb-46f9-995c-6b4e1484dca4 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.239390] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1971.239575] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1971.240319] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed8322e-143e-48f5-8bfb-b69b42598be0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.246992] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1971.247237] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c99bd77-9c0b-48d4-9bfa-225a4b1a3a60 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.249484] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1971.249653] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1971.250629] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b14acfc7-2132-4fc0-86cf-a30ed6c57bd2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.255805] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 1971.255805] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a0783b-c055-99e1-6b2d-a3cbd6565337" [ 1971.255805] env[61964]: _type = "Task" [ 1971.255805] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.263691] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a0783b-c055-99e1-6b2d-a3cbd6565337, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.322776] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1971.322994] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1971.323235] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Deleting the datastore file [datastore1] 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1971.323511] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41a53390-fbb0-46e3-8a75-c2ededfa8ad6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.330239] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for the task: (returnval){ [ 1971.330239] env[61964]: value = "task-1688663" [ 1971.330239] env[61964]: _type = "Task" [ 1971.330239] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.337757] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': task-1688663, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.766549] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1971.766857] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating directory with path [datastore1] vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1971.766857] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dd6d039-2ef3-49a2-8f9e-227ace0f5359 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.778113] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Created directory with path [datastore1] vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1971.778309] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Fetch image to [datastore1] vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1971.778476] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1971.779243] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73afbe2b-7cdd-4edd-ab80-136e90f025da {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.787525] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9f9490-65a4-4f6c-83fd-156b81bc28ed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.796380] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b98b247-8794-4202-bb91-7973db4c4095 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.825865] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9d1bf428-a9a3-4d70-b58c-be62109778aa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.833723] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2bacb4bb-e628-46cc-bc40-30fa023cd295 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.839681] env[61964]: DEBUG oslo_vmware.api [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': task-1688663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066186} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.839899] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1971.840097] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1971.840268] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1971.840435] env[61964]: INFO nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1971.842576] env[61964]: DEBUG nova.compute.claims [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1971.842746] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1971.842956] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1971.857869] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1971.906702] env[61964]: DEBUG oslo_vmware.rw_handles [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1971.967550] env[61964]: DEBUG oslo_vmware.rw_handles [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1971.967737] env[61964]: DEBUG oslo_vmware.rw_handles [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1972.342788] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a456d548-f19d-46c8-90ce-f41b1d8b20e0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.350844] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737d3e6c-e8ec-416a-95b9-78d265e69f98 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.381962] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e53f3a1-846b-4c64-9178-7d3665782f11 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.389131] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad88d238-7948-4685-930c-24cde217017c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.401871] env[61964]: DEBUG nova.compute.provider_tree [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1972.426364] env[61964]: ERROR nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [req-6bb81740-4aff-43c6-9537-00ac3158c80d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c64b88bc-0cc7-41f7-af90-1e96b384d8a5. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6bb81740-4aff-43c6-9537-00ac3158c80d"}]}: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1972.440798] env[61964]: DEBUG nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1972.452866] env[61964]: DEBUG nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1972.453076] env[61964]: DEBUG nova.compute.provider_tree [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1972.462857] env[61964]: DEBUG nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1972.478859] env[61964]: DEBUG nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1972.768937] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d55af34-6f8e-4878-bdd8-2d89fae7d791 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.776780] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a611df07-ae9e-4fbe-8b94-5d79f9305b21 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.805603] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b211ad4-db39-483f-a36f-25a8e44121e6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.812527] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8e55c6-9b75-44dc-873d-4b47a772187c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.825947] env[61964]: DEBUG nova.compute.provider_tree [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1972.860855] env[61964]: DEBUG nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Updated inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with generation 74 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 1972.860855] env[61964]: DEBUG nova.compute.provider_tree [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Updating resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 generation from 74 to 75 during operation: update_inventory {{(pid=61964) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1972.860855] env[61964]: DEBUG nova.compute.provider_tree [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1972.877665] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 
tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.035s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1972.878203] env[61964]: ERROR nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1972.878203] env[61964]: Faults: ['InvalidArgument'] [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Traceback (most recent call last): [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self.driver.spawn(context, instance, image_meta, [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self._fetch_image_if_missing(context, vi) [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] image_cache(vi, tmp_image_ds_loc) [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] vm_util.copy_virtual_disk( [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] session._wait_for_task(vmdk_copy_task) [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] return self.wait_for_task(task_ref) [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] return evt.wait() [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 
8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] result = hub.switch() [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] return self.greenlet.switch() [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] self.f(*self.args, **self.kw) [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] raise exceptions.translate_fault(task_info.error) [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Faults: ['InvalidArgument'] [ 1972.878203] env[61964]: ERROR nova.compute.manager [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] [ 1972.879186] env[61964]: DEBUG nova.compute.utils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1972.880462] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Build of instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 was re-scheduled: A specified parameter was not correct: fileType [ 1972.880462] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1972.881082] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1972.881241] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1972.881417] env[61964]: DEBUG nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1972.881590] env[61964]: DEBUG nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1973.291923] env[61964]: DEBUG nova.network.neutron [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.308719] env[61964]: INFO nova.compute.manager [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Took 0.43 seconds to deallocate network for instance. [ 1973.432490] env[61964]: INFO nova.scheduler.client.report [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Deleted allocations for instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 [ 1973.455529] env[61964]: DEBUG oslo_concurrency.lockutils [None req-4a8573f4-e19d-429e-b53c-8c047a6ea67c tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 514.970s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1973.456718] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 316.687s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1973.456953] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1973.457267] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1973.457459] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1973.459702] env[61964]: INFO nova.compute.manager [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Terminating instance [ 1973.461426] env[61964]: DEBUG nova.compute.manager [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1973.461617] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1973.462099] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66786997-057e-400e-911e-11fb0e07db3c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.471347] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b984314-9bfd-4361-a2ed-33106601d4e7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.482803] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1973.502545] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1 could not be found. [ 1973.502782] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1973.502919] env[61964]: INFO nova.compute.manager [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1973.503249] env[61964]: DEBUG oslo.service.loopingcall [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1973.503476] env[61964]: DEBUG nova.compute.manager [-] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1973.503573] env[61964]: DEBUG nova.network.neutron [-] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1973.535016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1973.535339] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1973.536774] env[61964]: INFO nova.compute.claims [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1973.539855] env[61964]: DEBUG nova.network.neutron [-] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.549857] env[61964]: INFO nova.compute.manager [-] [instance: 8a3a433a-317c-41a8-9aa6-32d2d3ecaca1] Took 0.05 seconds to deallocate network for instance. 
[ 1973.638702] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9bc8c60e-4360-4cc3-ac68-4bd6152edaf4 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "8a3a433a-317c-41a8-9aa6-32d2d3ecaca1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1973.890126] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a411431-4a47-49f5-9e25-e405cfd153d2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.897516] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fea3e8-57b0-46e6-91ae-7fb9f9da9596 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.927091] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3c05bf-7bcd-46ef-a1e5-360e35f5394a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.934203] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ece794-904c-4f3f-aae4-34fec6076a45 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.947376] env[61964]: DEBUG nova.compute.provider_tree [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1973.957512] env[61964]: DEBUG nova.scheduler.client.report [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1973.972482] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.437s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1973.973137] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1974.007188] env[61964]: DEBUG nova.compute.utils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1974.008461] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1974.008625] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1974.018316] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1974.083012] env[61964]: DEBUG nova.policy [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '201e92e5878f4f8ca5ef18accf25477d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7eedeebe1674abd8d2e3f33fec488b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 1974.086500] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1974.111122] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1974.111415] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1974.111538] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1974.111717] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1974.111860] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1974.112010] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1974.112225] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1974.112385] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1974.112548] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1974.112706] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1974.112873] env[61964]: DEBUG nova.virt.hardware [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1974.113922] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3a0d44-0fc4-46be-b88f-b12d3af81f1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.122501] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0b7ea2-8599-4814-ab92-2c5141591fc1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.460499] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Successfully created port: c03455de-c4dd-4998-b624-2104d0a60286 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1975.103440] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Successfully updated port: c03455de-c4dd-4998-b624-2104d0a60286 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1975.134401] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "refresh_cache-59c25eab-011e-4690-99fe-976f8dbea580" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1975.134564] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquired lock "refresh_cache-59c25eab-011e-4690-99fe-976f8dbea580" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1975.134710] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 
tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1975.172129] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1975.572441] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Updating instance_info_cache with network_info: [{"id": "c03455de-c4dd-4998-b624-2104d0a60286", "address": "fa:16:3e:68:15:a9", "network": {"id": "8aeb085d-cd8c-441e-9a87-1f43bdf92623", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-116159592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f7eedeebe1674abd8d2e3f33fec488b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc03455de-c4", "ovs_interfaceid": "c03455de-c4dd-4998-b624-2104d0a60286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.586218] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Releasing lock "refresh_cache-59c25eab-011e-4690-99fe-976f8dbea580" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1975.586524] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Instance network_info: |[{"id": "c03455de-c4dd-4998-b624-2104d0a60286", "address": "fa:16:3e:68:15:a9", "network": {"id": "8aeb085d-cd8c-441e-9a87-1f43bdf92623", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-116159592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"f7eedeebe1674abd8d2e3f33fec488b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc03455de-c4", "ovs_interfaceid": "c03455de-c4dd-4998-b624-2104d0a60286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1975.587308] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:15:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dacd109c-2442-41b8-b612-7ed3efbdaa94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c03455de-c4dd-4998-b624-2104d0a60286', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1975.594957] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Creating folder: Project (f7eedeebe1674abd8d2e3f33fec488b5). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1975.595608] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-505be63e-a54b-44b0-892a-493f59a2b3e9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.598716] env[61964]: DEBUG nova.compute.manager [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Received event network-vif-plugged-c03455de-c4dd-4998-b624-2104d0a60286 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1975.598916] env[61964]: DEBUG oslo_concurrency.lockutils [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] Acquiring lock "59c25eab-011e-4690-99fe-976f8dbea580-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1975.599161] env[61964]: DEBUG oslo_concurrency.lockutils [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] Lock "59c25eab-011e-4690-99fe-976f8dbea580-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1975.599332] env[61964]: DEBUG oslo_concurrency.lockutils [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] Lock "59c25eab-011e-4690-99fe-976f8dbea580-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1975.599496] env[61964]: DEBUG nova.compute.manager [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] No waiting events found dispatching network-vif-plugged-c03455de-c4dd-4998-b624-2104d0a60286 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1975.599654] env[61964]: WARNING nova.compute.manager [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Received unexpected event network-vif-plugged-c03455de-c4dd-4998-b624-2104d0a60286 for instance with vm_state building and task_state spawning. [ 1975.599806] env[61964]: DEBUG nova.compute.manager [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Received event network-changed-c03455de-c4dd-4998-b624-2104d0a60286 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1975.599953] env[61964]: DEBUG nova.compute.manager [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Refreshing instance network info cache due to event network-changed-c03455de-c4dd-4998-b624-2104d0a60286. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1975.600165] env[61964]: DEBUG oslo_concurrency.lockutils [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] Acquiring lock "refresh_cache-59c25eab-011e-4690-99fe-976f8dbea580" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1975.600309] env[61964]: DEBUG oslo_concurrency.lockutils [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] Acquired lock "refresh_cache-59c25eab-011e-4690-99fe-976f8dbea580" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1975.600459] env[61964]: DEBUG nova.network.neutron [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Refreshing network info cache for port c03455de-c4dd-4998-b624-2104d0a60286 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1975.612475] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Created folder: Project (f7eedeebe1674abd8d2e3f33fec488b5) in parent group-v351942. [ 1975.612648] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Creating folder: Instances. Parent ref: group-v352007. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1975.613174] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a6f23b7-5293-4a59-9b0c-e8efc56d0407 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.621321] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Created folder: Instances in parent group-v352007. [ 1975.621579] env[61964]: DEBUG oslo.service.loopingcall [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.621775] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1975.621973] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f622ced4-bff9-4103-974c-2bf6302b9474 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.640426] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1975.640426] env[61964]: value = "task-1688666" [ 1975.640426] env[61964]: _type = "Task" [ 1975.640426] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.649531] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688666, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.877358] env[61964]: DEBUG nova.network.neutron [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Updated VIF entry in instance network info cache for port c03455de-c4dd-4998-b624-2104d0a60286. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1975.877757] env[61964]: DEBUG nova.network.neutron [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Updating instance_info_cache with network_info: [{"id": "c03455de-c4dd-4998-b624-2104d0a60286", "address": "fa:16:3e:68:15:a9", "network": {"id": "8aeb085d-cd8c-441e-9a87-1f43bdf92623", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-116159592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f7eedeebe1674abd8d2e3f33fec488b5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc03455de-c4", "ovs_interfaceid": "c03455de-c4dd-4998-b624-2104d0a60286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.887797] env[61964]: DEBUG oslo_concurrency.lockutils [req-437b979a-1378-4656-aae3-3e00a9d5532c req-aecdb588-45d3-4b2c-be0c-0f958ec7ac1c service nova] Releasing lock "refresh_cache-59c25eab-011e-4690-99fe-976f8dbea580" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1976.150860] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688666, 'name': CreateVM_Task, 'duration_secs': 0.296518} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.151161] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1976.151787] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1976.151862] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1976.152311] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1976.152395] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e49cd4bd-501a-4d92-99e2-3831a5100069 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.157204] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Waiting for the task: (returnval){ [ 1976.157204] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529baca8-b9a3-49a4-f41a-e55706df5463" [ 1976.157204] env[61964]: _type = "Task" [ 1976.157204] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.164812] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529baca8-b9a3-49a4-f41a-e55706df5463, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.667981] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1976.668992] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1976.668992] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1977.460764] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1977.461072] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1985.797615] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1990.576029] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1990.576337] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1990.576337] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964)
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1990.598420] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.598572] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.598700] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.598823] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.598944] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.599075] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.599271] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.599408] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.599531] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.599648] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1990.599769] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1990.600299] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1991.383991] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.384848] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.385154] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1993.394384] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] There are 0 instances to clean {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1994.393530] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.379547] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.383968] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.385030] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1997.384862] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.384589] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.384872] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.385221] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61964) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2001.395247] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.407601] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2001.407834] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2001.408018] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2001.408180] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2001.409369] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350cd897-5946-43e5-9f7f-3959df3e190e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.418322] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4141695d-c395-42b8-8208-b73c0fb2632c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.432222] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d33041-4480-4f2f-8b95-0ec86efcfb2e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.438364] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9055a89e-cdb9-42c2-b20b-84cc239f0492 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.467549] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181339MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2001.467700] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2001.467886] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2001.619633] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.619771] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.619898] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.620032] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.620148] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.620333] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.620478] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.620597] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.620711] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.620823] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2001.631830] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.643137] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.654587] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 81146564-0c00-4230-9d51-d6cfb68c9597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.664761] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a9d07839-7511-40e1-bf24-c8d83559cffe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.674054] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.683512] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9a228ad9-96ae-471c-961b-60d93c70d6c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.692350] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f9a55f7d-b347-4d07-b98b-18178271d039 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.702321] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 049292ed-1aab-4ea3-930b-f34822b4fb73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.711247] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3699043f-9be3-4997-bc40-6d9bb77fbcba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.720131] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3fd098ca-116f-4bc3-9e39-404bf4968a66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.729116] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.738371] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c68c1fe4-ef07-4bb7-b9be-16e02d0bd855 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.747110] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0dad08e5-1725-4e1e-98f1-068da1f9edcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.756216] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance fa5eb87f-8546-4e29-b3d2-0e898d113beb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.765123] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 22d0050d-4654-4e63-ae79-bc209d714635 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.774730] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 149b16e3-d4d7-48c8-a7e4-32d869e82615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.783841] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 720f4e4b-295e-4a1d-af1e-bfa6739844c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.792728] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c7caa492-efdd-462a-9bc3-9f19d6b0f7a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.801449] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2001.801680] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2001.801826] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2002.195595] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c86499-4b65-4b76-a697-7a5e79366435 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.203045] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87097462-ae19-41f4-86db-075356000032 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.232244] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60f79ad-db91-4c97-89c6-82d0ce30ee8f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.239386] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a27cdf-9dc9-4922-bd85-7141e7044c51 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.252060] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2002.261527] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2002.276139] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2002.276323] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.808s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2005.261473] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2007.384119] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.761920] env[61964]: WARNING oslo_vmware.rw_handles [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.761920] env[61964]: ERROR oslo_vmware.rw_handles [ 2017.762603] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2017.764625] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2017.764950] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Copying Virtual Disk [datastore1] vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/6fd9dbdb-b6c0-46e7-a0b6-94026e5ba371/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2017.765236] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-759d75eb-dace-424a-bcd9-45b95964169c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.773496] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 2017.773496] env[61964]: value = "task-1688667" [ 2017.773496] env[61964]: _type = "Task" [ 2017.773496] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.782072] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': task-1688667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.285424] env[61964]: DEBUG oslo_vmware.exceptions [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2018.285663] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2018.286273] env[61964]: ERROR nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.286273] env[61964]: Faults: ['InvalidArgument'] [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Traceback (most recent call last): [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] yield resources [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self.driver.spawn(context, instance, image_meta, [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self._fetch_image_if_missing(context, vi) [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] image_cache(vi, tmp_image_ds_loc) [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] vm_util.copy_virtual_disk( [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] session._wait_for_task(vmdk_copy_task) [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] return self.wait_for_task(task_ref) [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] return evt.wait() [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] result = hub.switch() [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] return self.greenlet.switch() [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self.f(*self.args, **self.kw) [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] raise exceptions.translate_fault(task_info.error) [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Faults: ['InvalidArgument'] [ 2018.286273] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] [ 2018.287348] env[61964]: INFO nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Terminating instance [ 2018.288189] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2018.289109] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.289109] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-874561b2-4264-4ff7-8ca8-5a4de2a2b638 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.290753] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2018.290956] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2018.291711] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe60f77-d254-46c6-a1d9-303b1d403cbb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.298313] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2018.298527] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6f44ecf-ac14-4abf-a586-1fab680ebb4f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.300726] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.300885] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2018.301795] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-426bc495-3e36-4370-86e0-adea45b8a768 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.306101] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Waiting for the task: (returnval){ [ 2018.306101] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f496cb-c5ab-bb0e-a97d-9c388eb77df1" [ 2018.306101] env[61964]: _type = "Task" [ 2018.306101] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.313254] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f496cb-c5ab-bb0e-a97d-9c388eb77df1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.371753] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2018.371973] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2018.372177] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Deleting the datastore file [datastore1] b8231080-7a09-4e00-ab2b-e9ff4abf352a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.372435] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e507bca-d14c-488c-b834-cd5ad6bbcc25 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.379033] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 2018.379033] env[61964]: value = "task-1688669" [ 2018.379033] env[61964]: _type = "Task" [ 2018.379033] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.386281] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': task-1688669, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.816198] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2018.816462] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Creating directory with path [datastore1] vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.816730] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df74191d-44eb-4d70-b33d-c82f5932c797 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.828031] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Created directory with path [datastore1] vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.828236] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Fetch image to [datastore1] vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2018.828397] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2018.829189] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc71518-7442-4564-82e1-0ac6933a785b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.835958] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6253e36-c319-43e9-af8a-fdb40169e7ba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.845451] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9782e19f-3f4a-4d4b-bd7f-88b67e4aa815 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2018.875940] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39cb27b-ea70-49e9-8315-042a822eab32 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.884941] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fa93cb1b-6eae-4f18-a2a4-72d2559bf7bb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.889597] env[61964]: DEBUG oslo_vmware.api [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': task-1688669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081916} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.890166] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.890570] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2018.890570] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2018.890770] env[61964]: INFO nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2018.892877] env[61964]: DEBUG nova.compute.claims [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2018.893088] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2018.893282] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2018.911124] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2018.970909] env[61964]: DEBUG oslo_vmware.rw_handles [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2019.034734] env[61964]: DEBUG oslo_vmware.rw_handles [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2019.034992] env[61964]: DEBUG oslo_vmware.rw_handles [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2019.322724] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5c493b-b941-4876-9e31-092195fe3006 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.332059] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263ba96a-5b89-4216-b338-ad6c229bf161 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.361534] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80a42b9-a175-4e6e-b7e6-d16ebf0eccf9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.369093] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a704ee53-bc67-48bf-9bca-3b1edda3a89e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.382023] env[61964]: DEBUG nova.compute.provider_tree [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2019.390413] env[61964]: DEBUG nova.scheduler.client.report [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2019.405951] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.512s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2019.406387] env[61964]: ERROR nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.406387] env[61964]: Faults: ['InvalidArgument'] [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Traceback (most recent call last): [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2019.406387] 
env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self.driver.spawn(context, instance, image_meta, [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self._fetch_image_if_missing(context, vi) [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] image_cache(vi, tmp_image_ds_loc) [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] vm_util.copy_virtual_disk( [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] session._wait_for_task(vmdk_copy_task) [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] return self.wait_for_task(task_ref) [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] return evt.wait() [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] result = hub.switch() [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] return self.greenlet.switch() [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] self.f(*self.args, **self.kw) [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] raise exceptions.translate_fault(task_info.error) [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Faults: ['InvalidArgument'] [ 2019.406387] env[61964]: ERROR nova.compute.manager [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] [ 2019.407482] env[61964]: DEBUG nova.compute.utils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2019.408522] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Build of instance b8231080-7a09-4e00-ab2b-e9ff4abf352a was re-scheduled: A specified parameter was not correct: fileType [ 2019.408522] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2019.408887] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2019.409249] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2019.409595] env[61964]: DEBUG nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2019.409895] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2019.748737] env[61964]: DEBUG nova.network.neutron [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.758626] env[61964]: INFO nova.compute.manager [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Took 0.35 seconds to deallocate network for instance. [ 2019.861343] env[61964]: INFO nova.scheduler.client.report [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Deleted allocations for instance b8231080-7a09-4e00-ab2b-e9ff4abf352a [ 2019.884660] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c1ceb810-7185-4991-a1fd-08b9db6b2b4c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 554.634s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2019.886190] env[61964]: DEBUG oslo_concurrency.lockutils [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 355.850s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2019.886190] env[61964]: DEBUG oslo_concurrency.lockutils [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2019.886476] env[61964]: DEBUG oslo_concurrency.lockutils [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2019.886476] env[61964]: DEBUG oslo_concurrency.lockutils [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2019.888950] env[61964]: INFO nova.compute.manager [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Terminating instance [ 2019.890790] env[61964]: DEBUG nova.compute.manager [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2019.890976] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2019.891241] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-763549ee-47c0-4159-8763-6274023fb371 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.905552] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66124c5a-cce4-426d-8d5a-e46b14acb47f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.919478] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2019.942938] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b8231080-7a09-4e00-ab2b-e9ff4abf352a could not be found. 
[ 2019.944043] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2019.944043] env[61964]: INFO nova.compute.manager [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2019.944043] env[61964]: DEBUG oslo.service.loopingcall [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.944043] env[61964]: DEBUG nova.compute.manager [-] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2019.944043] env[61964]: DEBUG nova.network.neutron [-] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2019.972446] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2019.972446] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2019.973504] env[61964]: INFO nova.compute.claims [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2019.976624] env[61964]: DEBUG nova.network.neutron [-] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.985970] env[61964]: INFO nova.compute.manager [-] [instance: b8231080-7a09-4e00-ab2b-e9ff4abf352a] Took 0.04 seconds to deallocate network for instance. 
[ 2020.094975] env[61964]: DEBUG oslo_concurrency.lockutils [None req-baa650e4-2424-4a5e-9a62-d4041c3d6894 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "b8231080-7a09-4e00-ab2b-e9ff4abf352a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2020.338317] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756fdff6-22ea-40b6-bb0a-2683de152d47 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.345729] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9df33c4-e406-442f-baed-11388b1a87e5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.379993] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3345f75b-e4c2-4b97-b4ef-87ec8268414d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.388043] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1ea355-9182-4fe6-9f94-22e8fca07cfc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.400943] env[61964]: DEBUG nova.compute.provider_tree [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.411245] env[61964]: DEBUG nova.scheduler.client.report [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2020.427440] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.455s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2020.427928] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2020.462352] env[61964]: DEBUG nova.compute.utils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2020.463794] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2020.463965] env[61964]: DEBUG nova.network.neutron [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2020.473582] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2020.537716] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2020.549318] env[61964]: DEBUG nova.policy [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46f96afd6f6749859606fb5ff1b20bc9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'adec86fd331649de9cf354d21ae7d839', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2020.566587] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:46:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d0d76d8a-c435-4bc3-8d13-fcddebd0ddff',id=38,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1295211086',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2020.566830] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2020.566981] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2020.567178] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2020.567322] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2020.567481] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2020.567656] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2020.567810] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2020.567972] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2020.568278] env[61964]: DEBUG nova.virt.hardware [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2020.568468] env[61964]: DEBUG nova.virt.hardware [None 
req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2020.569356] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeead1fa-7921-42fa-bebc-7a5637a60f51 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.577909] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f3de81-9a75-4b32-a988-5b744db56af7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.895120] env[61964]: DEBUG nova.network.neutron [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Successfully created port: 162d33c0-420d-4759-a45b-6ad8143b5414 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2021.654901] env[61964]: DEBUG nova.network.neutron [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Successfully updated port: 162d33c0-420d-4759-a45b-6ad8143b5414 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2021.666803] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "refresh_cache-b6c97be0-e146-46b1-8d2e-085818e45835" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2021.666959] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquired lock "refresh_cache-b6c97be0-e146-46b1-8d2e-085818e45835" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2021.667120] env[61964]: DEBUG nova.network.neutron [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2021.715136] env[61964]: DEBUG nova.network.neutron [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2021.814644] env[61964]: DEBUG nova.compute.manager [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Received event network-vif-plugged-162d33c0-420d-4759-a45b-6ad8143b5414 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2021.814857] env[61964]: DEBUG oslo_concurrency.lockutils [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] Acquiring lock "b6c97be0-e146-46b1-8d2e-085818e45835-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2021.815077] env[61964]: DEBUG oslo_concurrency.lockutils [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] Lock "b6c97be0-e146-46b1-8d2e-085818e45835-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2021.815244] env[61964]: DEBUG oslo_concurrency.lockutils [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] Lock "b6c97be0-e146-46b1-8d2e-085818e45835-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2021.815408] env[61964]: DEBUG nova.compute.manager [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] No waiting events found dispatching network-vif-plugged-162d33c0-420d-4759-a45b-6ad8143b5414 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2021.815567] env[61964]: WARNING nova.compute.manager [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Received unexpected event network-vif-plugged-162d33c0-420d-4759-a45b-6ad8143b5414 for instance with vm_state building and task_state spawning. [ 2021.815727] env[61964]: DEBUG nova.compute.manager [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Received event network-changed-162d33c0-420d-4759-a45b-6ad8143b5414 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2021.815902] env[61964]: DEBUG nova.compute.manager [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Refreshing instance network info cache due to event network-changed-162d33c0-420d-4759-a45b-6ad8143b5414. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2021.816101] env[61964]: DEBUG oslo_concurrency.lockutils [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] Acquiring lock "refresh_cache-b6c97be0-e146-46b1-8d2e-085818e45835" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2021.900906] env[61964]: DEBUG nova.network.neutron [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Updating instance_info_cache with network_info: [{"id": "162d33c0-420d-4759-a45b-6ad8143b5414", "address": "fa:16:3e:77:a4:80", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap162d33c0-42", "ovs_interfaceid": "162d33c0-420d-4759-a45b-6ad8143b5414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.912249] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Releasing lock "refresh_cache-b6c97be0-e146-46b1-8d2e-085818e45835" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2021.912549] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Instance network_info: |[{"id": "162d33c0-420d-4759-a45b-6ad8143b5414", "address": "fa:16:3e:77:a4:80", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap162d33c0-42", "ovs_interfaceid": 
"162d33c0-420d-4759-a45b-6ad8143b5414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2021.912845] env[61964]: DEBUG oslo_concurrency.lockutils [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] Acquired lock "refresh_cache-b6c97be0-e146-46b1-8d2e-085818e45835" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2021.913031] env[61964]: DEBUG nova.network.neutron [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Refreshing network info cache for port 162d33c0-420d-4759-a45b-6ad8143b5414 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2021.914044] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:a4:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '162d33c0-420d-4759-a45b-6ad8143b5414', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2021.921557] env[61964]: DEBUG oslo.service.loopingcall [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2021.922387] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2021.924710] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21eefb62-82a3-4729-8aea-a8a0e12ddca0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.947989] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2021.947989] env[61964]: value = "task-1688670" [ 2021.947989] env[61964]: _type = "Task" [ 2021.947989] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.956084] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688670, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.220499] env[61964]: DEBUG nova.network.neutron [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Updated VIF entry in instance network info cache for port 162d33c0-420d-4759-a45b-6ad8143b5414. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2022.220898] env[61964]: DEBUG nova.network.neutron [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Updating instance_info_cache with network_info: [{"id": "162d33c0-420d-4759-a45b-6ad8143b5414", "address": "fa:16:3e:77:a4:80", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap162d33c0-42", "ovs_interfaceid": "162d33c0-420d-4759-a45b-6ad8143b5414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.232339] env[61964]: DEBUG oslo_concurrency.lockutils [req-d049b329-5d79-4754-aecf-fd8037832d9f req-8fb690ca-0290-4289-8541-0e0b3a57f845 service nova] Releasing lock "refresh_cache-b6c97be0-e146-46b1-8d2e-085818e45835" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2022.458031] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688670, 'name': CreateVM_Task, 'duration_secs': 0.294666} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.458031] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2022.458396] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2022.458558] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2022.458865] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2022.459125] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39b5f7cb-5de4-4582-b95b-c06aadf92257 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.463210] env[61964]: DEBUG oslo_vmware.api [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for the task: (returnval){ [ 2022.463210] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5225b82f-3f75-db31-50ff-b5570b5507b6" [ 2022.463210] env[61964]: _type = "Task" [ 2022.463210] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.470094] env[61964]: DEBUG oslo_vmware.api [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5225b82f-3f75-db31-50ff-b5570b5507b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.973973] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2022.973973] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2022.973973] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2026.290966] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "59c25eab-011e-4690-99fe-976f8dbea580" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.551222] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2029.576777] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Getting list of instances from cluster (obj){ [ 2029.576777] env[61964]: value = "domain-c8" [ 2029.576777] env[61964]: _type = "ClusterComputeResource" [ 2029.576777] env[61964]: } {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2029.579864] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a30e84e-6176-42f1-a61d-a1123c345dcc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.597473] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Got total of 10 instances {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2029.597652] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 34e97fca-5664-418e-bb12-8c16ddb3b0c9 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.597840] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid ac955d73-c844-4b98-b791-7d7c749c6954 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.600350] env[61964]: DEBUG nova.compute.manager [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid ae85ff01-5625-453d-9dcf-c8417fbb6e0c {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.600583] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 032f2d6d-04c3-4210-a8d0-1c325a304a88 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.600754] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 2b69def4-b892-4d76-bfd2-841014f75098 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.600925] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid c73b38c1-53d0-4c98-814f-b6b8984bbaf5 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.601117] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 66787186-e8c6-4700-9caf-bd7e7970b65d {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.601231] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.601383] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 59c25eab-011e-4690-99fe-976f8dbea580 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.601528] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid b6c97be0-e146-46b1-8d2e-085818e45835 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2029.601868] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.602144] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "ac955d73-c844-4b98-b791-7d7c749c6954" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.602352] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.602542] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.602730] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "2b69def4-b892-4d76-bfd2-841014f75098" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.602916] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.603122] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "66787186-e8c6-4700-9caf-bd7e7970b65d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.603311] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.603494] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "59c25eab-011e-4690-99fe-976f8dbea580" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2029.603681] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "b6c97be0-e146-46b1-8d2e-085818e45835" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2050.436960] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2050.436960] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2050.436960] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2050.461484] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461484] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461484] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461484] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461484] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461810] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461810] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461810] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.461913] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.462083] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2050.462185] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2050.462778] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2050.835064] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "b6c97be0-e146-46b1-8d2e-085818e45835" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2051.383562] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.942087] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "9ae01818-da08-4137-97c0-bc4c57759d46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2055.942428] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9ae01818-da08-4137-97c0-bc4c57759d46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2055.973492] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2055.974640] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2056.384465] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2057.384363] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2057.384626] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2058.379659] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.384788] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2060.384749] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2061.508797] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2061.509121] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2062.383653] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.396462] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2062.396737] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2062.396886] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2062.397058] env[61964]: DEBUG 
nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2062.398186] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a6b611-f80e-4d05-b182-6df8650b1f13 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.406820] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634b2732-09ce-4b0a-9914-eafa53855da1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.420710] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18a289d-f72a-471a-91c8-488ace2afccf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.427050] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60732e98-b6c3-4bae-ad8b-b4ab7c5267cc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.457501] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181339MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2062.457648] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2062.457846] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2062.536820] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.537159] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ac955d73-c844-4b98-b791-7d7c749c6954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.537296] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.537432] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.537553] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.537671] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.537837] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.537896] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.538019] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.538171] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2062.549639] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3699043f-9be3-4997-bc40-6d9bb77fbcba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.561377] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3fd098ca-116f-4bc3-9e39-404bf4968a66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.571911] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.581937] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c68c1fe4-ef07-4bb7-b9be-16e02d0bd855 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.591523] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 0dad08e5-1725-4e1e-98f1-068da1f9edcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.601251] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance fa5eb87f-8546-4e29-b3d2-0e898d113beb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.611450] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 22d0050d-4654-4e63-ae79-bc209d714635 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.621110] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 149b16e3-d4d7-48c8-a7e4-32d869e82615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.630397] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 720f4e4b-295e-4a1d-af1e-bfa6739844c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.639549] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c7caa492-efdd-462a-9bc3-9f19d6b0f7a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.648747] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.658845] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.668246] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.677347] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2062.677574] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2062.677720] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2062.936865] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2387dcc4-1262-4af9-bd94-f9bcfb5dec8e tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "d0cd87ae-53f6-4b03-9b49-b84b34cea243" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2062.937137] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2387dcc4-1262-4af9-bd94-f9bcfb5dec8e tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "d0cd87ae-53f6-4b03-9b49-b84b34cea243" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2062.966022] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f175bac-7700-425f-ab61-16e9b50badd9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.973496] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa8c129-aedd-4e3f-97cd-8c29418279e3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.002837] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bd1350-2456-4d16-b04e-9fb9958bebc9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.010013] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9daee94f-aca7-4865-9afa-a0da5edc7037 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.022378] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2063.030290] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2063.044704] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2063.044704] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.587s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2067.144278] env[61964]: WARNING oslo_vmware.rw_handles [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2067.144278] env[61964]: ERROR oslo_vmware.rw_handles [ 2067.145090] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2067.146826] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2067.149800] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 
tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Copying Virtual Disk [datastore1] vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/c0ec5c08-5fbc-4d58-b7d7-c983b0a26e91/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2067.150813] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bd72d7c-08e5-475c-b4b1-dd17d6cb4bee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.159503] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Waiting for the task: (returnval){ [ 2067.159503] env[61964]: value = "task-1688671" [ 2067.159503] env[61964]: _type = "Task" [ 2067.159503] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.168759] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Task: {'id': task-1688671, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.670912] env[61964]: DEBUG oslo_vmware.exceptions [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2067.671224] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2067.671762] env[61964]: ERROR nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.671762] env[61964]: Faults: ['InvalidArgument'] [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Traceback (most recent call last): [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] yield resources [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] self.driver.spawn(context, instance, image_meta, [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] self._fetch_image_if_missing(context, vi) [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] image_cache(vi, tmp_image_ds_loc) [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] vm_util.copy_virtual_disk( [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] session._wait_for_task(vmdk_copy_task) [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 
34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] return self.wait_for_task(task_ref) [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] return evt.wait() [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] result = hub.switch() [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] return self.greenlet.switch() [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] self.f(*self.args, **self.kw) [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] raise exceptions.translate_fault(task_info.error) [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Faults: ['InvalidArgument'] [ 2067.671762] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] [ 2067.672674] env[61964]: INFO nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Terminating instance [ 2067.673633] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2067.673834] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2067.674463] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e 
tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2067.674653] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2067.674882] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a0c9f15-b6b1-4214-961c-242b52c8e32e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.677451] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66858bb-9d79-414d-bde4-7a2fc762d88c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.684750] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2067.685723] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f44fcf2-f67d-4bc1-85f3-18144e3b93a9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.687142] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2067.687312] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2067.687989] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d94655c7-9bff-4ee9-a393-7d2eec899656 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.693378] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 2067.693378] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522d8d5f-ddad-a75d-cba3-6df64a55b4a7" [ 2067.693378] env[61964]: _type = "Task" [ 2067.693378] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.701924] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522d8d5f-ddad-a75d-cba3-6df64a55b4a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.758056] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2067.758195] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2067.758290] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Deleting the datastore file [datastore1] 34e97fca-5664-418e-bb12-8c16ddb3b0c9 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2067.758555] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf36f063-fe9d-46fc-9966-2dbc21aa7859 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.764262] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Waiting for the task: (returnval){ [ 2067.764262] env[61964]: value = "task-1688673" [ 2067.764262] env[61964]: _type = "Task" [ 2067.764262] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.773658] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Task: {'id': task-1688673, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.205243] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2068.205527] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating directory with path [datastore1] vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2068.206324] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82703202-d003-40ef-b3cb-514dc65b4c65 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.219549] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Created directory with path [datastore1] vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2068.219872] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Fetch image to [datastore1] vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2068.220144] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2068.220872] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a74c9ab-3220-4d06-bed5-7e1544c63f7e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.227789] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b331e561-2dc7-4942-979a-930ba8b41baa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.236731] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0507d384-ecfc-444b-aa83-96ac9bfbdc59 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.276318] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a044b31-6070-4caa-8958-b1568f1509ef {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.284449] env[61964]: DEBUG oslo_vmware.api [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Task: {'id': task-1688673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08409} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.286458] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2068.286722] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2068.286960] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2068.287205] env[61964]: INFO nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2068.289363] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9474bff2-455a-41a8-932a-f559ead544f5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.291802] env[61964]: DEBUG nova.compute.claims [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2068.292078] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2068.292375] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2068.318514] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2068.520522] env[61964]: DEBUG oslo_vmware.rw_handles [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2068.584955] env[61964]: DEBUG oslo_vmware.rw_handles [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2068.585867] env[61964]: DEBUG oslo_vmware.rw_handles [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2068.758219] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2152441f-1e76-44e7-907e-2e8b1d50d2e1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.766458] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da09f4f-505a-4f31-8e43-f0e5669a57d0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.799528] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6aa9c7-a30f-4cc4-91bb-f8bc0e9f2193 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.806954] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57cdc9d-be7a-42a7-9a6e-2d0c91c073d6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.820369] env[61964]: DEBUG nova.compute.provider_tree [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.831012] env[61964]: DEBUG nova.scheduler.client.report [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2068.847476] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.555s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2068.848014] env[61964]: ERROR nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2068.848014] env[61964]: Faults: ['InvalidArgument'] [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Traceback (most recent call last): [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 
34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] self.driver.spawn(context, instance, image_meta, [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] self._fetch_image_if_missing(context, vi) [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] image_cache(vi, tmp_image_ds_loc) [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] vm_util.copy_virtual_disk( [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] session._wait_for_task(vmdk_copy_task) [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] return self.wait_for_task(task_ref) [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] return evt.wait() [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] result = hub.switch() [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] return self.greenlet.switch() [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] 
self.f(*self.args, **self.kw) [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] raise exceptions.translate_fault(task_info.error) [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Faults: ['InvalidArgument'] [ 2068.848014] env[61964]: ERROR nova.compute.manager [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] [ 2068.848872] env[61964]: DEBUG nova.compute.utils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2068.850148] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Build of instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 was re-scheduled: A specified parameter was not correct: fileType [ 2068.850148] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2068.850511] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2068.850691] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2068.850863] env[61964]: DEBUG nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2068.851052] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2069.271524] env[61964]: DEBUG oslo_concurrency.lockutils [None req-251dab60-8ba6-436b-87fa-8f09ceef3bad tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "abb43b76-b3df-4d4a-b1f4-801306d0f01f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2069.271842] env[61964]: DEBUG oslo_concurrency.lockutils [None req-251dab60-8ba6-436b-87fa-8f09ceef3bad tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "abb43b76-b3df-4d4a-b1f4-801306d0f01f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2069.530556] env[61964]: DEBUG nova.network.neutron [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.573581] env[61964]: INFO nova.compute.manager [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Took 0.72 seconds to deallocate network for instance. 
[ 2069.699666] env[61964]: INFO nova.scheduler.client.report [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Deleted allocations for instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 [ 2069.732938] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ec44eb1d-a6ba-4906-8d17-d67037b89b7e tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 601.115s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.734215] env[61964]: DEBUG oslo_concurrency.lockutils [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 402.803s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2069.734438] env[61964]: DEBUG oslo_concurrency.lockutils [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Acquiring lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2069.734640] env[61964]: DEBUG oslo_concurrency.lockutils [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2069.734951] env[61964]: DEBUG oslo_concurrency.lockutils [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.737010] env[61964]: INFO nova.compute.manager [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Terminating instance [ 2069.739895] env[61964]: DEBUG nova.compute.manager [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2069.740115] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2069.740383] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f98b1d3d-5cd8-407f-89c7-177a089a938c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.749825] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbbf0b5-fd91-46df-8576-f5b1fc54b56a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.760900] env[61964]: DEBUG nova.compute.manager [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] [instance: 9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2069.784539] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 34e97fca-5664-418e-bb12-8c16ddb3b0c9 could not be found. [ 2069.784755] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2069.786072] env[61964]: INFO nova.compute.manager [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2069.786072] env[61964]: DEBUG oslo.service.loopingcall [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2069.786072] env[61964]: DEBUG nova.compute.manager [-] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2069.786072] env[61964]: DEBUG nova.network.neutron [-] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2069.800280] env[61964]: DEBUG nova.compute.manager [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] [instance: 9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2069.812516] env[61964]: DEBUG nova.network.neutron [-] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2069.824111] env[61964]: INFO nova.compute.manager [-] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] Took 0.04 seconds to deallocate network for instance. [ 2069.834015] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Lock "9be6beb3-6ed3-4b2d-8c56-8c08ab3507d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.137s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.845219] env[61964]: DEBUG nova.compute.manager [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] [instance: 81146564-0c00-4230-9d51-d6cfb68c9597] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2069.873596] env[61964]: DEBUG nova.compute.manager [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] [instance: 81146564-0c00-4230-9d51-d6cfb68c9597] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2069.896356] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Lock "81146564-0c00-4230-9d51-d6cfb68c9597" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.173s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.905305] env[61964]: DEBUG nova.compute.manager [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] [instance: a9d07839-7511-40e1-bf24-c8d83559cffe] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2069.939283] env[61964]: DEBUG oslo_concurrency.lockutils [None req-54aa50c3-537c-4a25-aab7-003d1ce84a8b tempest-FloatingIPsAssociationNegativeTestJSON-406127074 tempest-FloatingIPsAssociationNegativeTestJSON-406127074-project-member] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.940179] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 40.338s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2069.940367] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 34e97fca-5664-418e-bb12-8c16ddb3b0c9] During sync_power_state the instance has a pending task (deleting). Skip. [ 2069.940539] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "34e97fca-5664-418e-bb12-8c16ddb3b0c9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.942072] env[61964]: DEBUG nova.compute.manager [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] [instance: a9d07839-7511-40e1-bf24-c8d83559cffe] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2069.961480] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96e28d24-5338-465e-a65b-b9de4ab79b75 tempest-ListServersNegativeTestJSON-964528201 tempest-ListServersNegativeTestJSON-964528201-project-member] Lock "a9d07839-7511-40e1-bf24-c8d83559cffe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.200s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2069.969999] env[61964]: DEBUG nova.compute.manager [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2069.991844] env[61964]: DEBUG nova.compute.manager [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2070.011878] env[61964]: DEBUG oslo_concurrency.lockutils [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "6de8b76d-ea5a-49c7-8e38-e19a8e25ef3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.233s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.019962] env[61964]: DEBUG nova.compute.manager [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9a228ad9-96ae-471c-961b-60d93c70d6c2] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2070.043317] env[61964]: DEBUG nova.compute.manager [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9a228ad9-96ae-471c-961b-60d93c70d6c2] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2070.063154] env[61964]: DEBUG oslo_concurrency.lockutils [None req-65ca14af-d8d2-43e3-be69-e54976cca6f7 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9a228ad9-96ae-471c-961b-60d93c70d6c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.260s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.070933] env[61964]: DEBUG nova.compute.manager [None req-409c2344-e23f-45f7-aad4-39a137ab723c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: f9a55f7d-b347-4d07-b98b-18178271d039] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2070.093461] env[61964]: DEBUG nova.compute.manager [None req-409c2344-e23f-45f7-aad4-39a137ab723c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: f9a55f7d-b347-4d07-b98b-18178271d039] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2070.114926] env[61964]: DEBUG oslo_concurrency.lockutils [None req-409c2344-e23f-45f7-aad4-39a137ab723c tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "f9a55f7d-b347-4d07-b98b-18178271d039" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.779s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.125044] env[61964]: DEBUG nova.compute.manager [None req-e4bc9351-fecf-4365-b0d0-f0a4888e5274 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 049292ed-1aab-4ea3-930b-f34822b4fb73] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2070.147504] env[61964]: DEBUG nova.compute.manager [None req-e4bc9351-fecf-4365-b0d0-f0a4888e5274 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 049292ed-1aab-4ea3-930b-f34822b4fb73] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2070.171032] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e4bc9351-fecf-4365-b0d0-f0a4888e5274 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "049292ed-1aab-4ea3-930b-f34822b4fb73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.558s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.180895] env[61964]: DEBUG nova.compute.manager [None req-d4f910d7-1107-43f5-8a50-73aae32a9344 tempest-ServersNegativeTestJSON-817738709 tempest-ServersNegativeTestJSON-817738709-project-member] [instance: 3699043f-9be3-4997-bc40-6d9bb77fbcba] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2070.203905] env[61964]: DEBUG nova.compute.manager [None req-d4f910d7-1107-43f5-8a50-73aae32a9344 tempest-ServersNegativeTestJSON-817738709 tempest-ServersNegativeTestJSON-817738709-project-member] [instance: 3699043f-9be3-4997-bc40-6d9bb77fbcba] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2070.225017] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d4f910d7-1107-43f5-8a50-73aae32a9344 tempest-ServersNegativeTestJSON-817738709 tempest-ServersNegativeTestJSON-817738709-project-member] Lock "3699043f-9be3-4997-bc40-6d9bb77fbcba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.716s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.234320] env[61964]: DEBUG nova.compute.manager [None req-43325e3d-4d97-40dc-b56f-04e2175cf04d tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 3fd098ca-116f-4bc3-9e39-404bf4968a66] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2070.256788] env[61964]: DEBUG nova.compute.manager [None req-43325e3d-4d97-40dc-b56f-04e2175cf04d tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 3fd098ca-116f-4bc3-9e39-404bf4968a66] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2070.281377] env[61964]: DEBUG oslo_concurrency.lockutils [None req-43325e3d-4d97-40dc-b56f-04e2175cf04d tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "3fd098ca-116f-4bc3-9e39-404bf4968a66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.391s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.292079] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2070.341666] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2070.341914] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2070.343399] env[61964]: INFO nova.compute.claims [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2070.648484] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc0241d-01c4-4998-89d2-7ef776159c11 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.656021] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833e198c-38f3-4e0c-b704-cf43a7685191 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.686081] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c96a183-0d69-4c4b-bed4-bcc5134581bf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.694518] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054886d8-24b9-441d-8143-b1ecf89c3ca4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.707580] env[61964]: DEBUG nova.compute.provider_tree [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2070.720776] env[61964]: 
DEBUG nova.scheduler.client.report [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2070.737054] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.395s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2070.737554] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2070.774032] env[61964]: DEBUG nova.compute.utils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2070.775094] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2070.775094] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2070.783950] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2070.835264] env[61964]: DEBUG nova.policy [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b94cc3cb460f4cc6a264dd70c4c4d064', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40c88d0201ba4420a2e8ae0d237c29f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2070.849508] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2070.874755] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2070.874993] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2070.875164] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2070.875342] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2070.875485] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2070.875628] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec 
tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2070.875825] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2070.875979] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2070.876157] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2070.876316] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2070.876486] env[61964]: DEBUG nova.virt.hardware [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2070.877335] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b594b6f-f702-41f4-bfd2-9d500bf55f8e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.885214] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5423bc-c963-4d5c-aaea-303ede7eff7f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.153592] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Successfully created port: 67fc8868-0816-4461-8ed9-1c34200f5e16 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2071.521878] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Successfully created port: 55c7ff44-dbe4-4808-9547-501e885f456e {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2072.187961] env[61964]: DEBUG nova.compute.manager [req-b01dedcc-bbe7-4123-b6c4-668032c7483e req-5c879127-88d1-4254-90c7-fb2988e8e760 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Received event 
network-vif-plugged-67fc8868-0816-4461-8ed9-1c34200f5e16 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2072.188207] env[61964]: DEBUG oslo_concurrency.lockutils [req-b01dedcc-bbe7-4123-b6c4-668032c7483e req-5c879127-88d1-4254-90c7-fb2988e8e760 service nova] Acquiring lock "63911858-5a79-4479-8c92-46afca980300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2072.188492] env[61964]: DEBUG oslo_concurrency.lockutils [req-b01dedcc-bbe7-4123-b6c4-668032c7483e req-5c879127-88d1-4254-90c7-fb2988e8e760 service nova] Lock "63911858-5a79-4479-8c92-46afca980300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2072.188676] env[61964]: DEBUG oslo_concurrency.lockutils [req-b01dedcc-bbe7-4123-b6c4-668032c7483e req-5c879127-88d1-4254-90c7-fb2988e8e760 service nova] Lock "63911858-5a79-4479-8c92-46afca980300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2072.188841] env[61964]: DEBUG nova.compute.manager [req-b01dedcc-bbe7-4123-b6c4-668032c7483e req-5c879127-88d1-4254-90c7-fb2988e8e760 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] No waiting events found dispatching network-vif-plugged-67fc8868-0816-4461-8ed9-1c34200f5e16 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2072.189006] env[61964]: WARNING nova.compute.manager [req-b01dedcc-bbe7-4123-b6c4-668032c7483e req-5c879127-88d1-4254-90c7-fb2988e8e760 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Received unexpected event network-vif-plugged-67fc8868-0816-4461-8ed9-1c34200f5e16 for instance with vm_state building and task_state spawning. 
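Note: the hardware lines above show nova reducing the m1.nano flavor (1 vCPU, with flavor/image limits of 0 and maxima of 65536) to the single possible 1:1:1 topology ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies"). The following is only a rough, self-contained sketch of that enumeration step, written for illustration; it is not nova.virt.hardware's actual code, and the function name, dataclass, and default limits are assumptions taken from the log output.

# Illustrative sketch only: enumerate sockets*cores*threads factorizations of a
# vCPU count under per-dimension limits, mirroring the "possible topologies"
# lines above. NOT nova's implementation.
from dataclasses import dataclass
from typing import List


@dataclass(frozen=True)
class CPUTopology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[CPUTopology]:
    """Return every sockets*cores*threads layout that multiplies to vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(CPUTopology(sockets, cores, threads))
    return found


if __name__ == "__main__":
    # For the 1-vCPU flavor in the log, only 1:1:1 is possible.
    print(possible_topologies(1))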
[ 2072.263948] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Successfully updated port: 67fc8868-0816-4461-8ed9-1c34200f5e16 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2073.203313] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "63911858-5a79-4479-8c92-46afca980300" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2073.253687] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Successfully updated port: 55c7ff44-dbe4-4808-9547-501e885f456e {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2073.271729] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2073.271865] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2073.272099] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2073.326129] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2073.744416] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Updating instance_info_cache with network_info: [{"id": "67fc8868-0816-4461-8ed9-1c34200f5e16", "address": "fa:16:3e:20:90:bd", "network": {"id": "c1c79d63-5676-4e03-a591-cb046d461540", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-617863065", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67fc8868-08", "ovs_interfaceid": "67fc8868-0816-4461-8ed9-1c34200f5e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55c7ff44-dbe4-4808-9547-501e885f456e", "address": "fa:16:3e:82:45:e1", "network": {"id": "427d54ba-0e99-4eb2-b8b7-a25bbdc86070", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-679417950", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c7ff44-db", "ovs_interfaceid": "55c7ff44-dbe4-4808-9547-501e885f456e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2073.761034] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2073.761034] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Instance network_info: |[{"id": "67fc8868-0816-4461-8ed9-1c34200f5e16", "address": "fa:16:3e:20:90:bd", "network": {"id": 
"c1c79d63-5676-4e03-a591-cb046d461540", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-617863065", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67fc8868-08", "ovs_interfaceid": "67fc8868-0816-4461-8ed9-1c34200f5e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55c7ff44-dbe4-4808-9547-501e885f456e", "address": "fa:16:3e:82:45:e1", "network": {"id": "427d54ba-0e99-4eb2-b8b7-a25bbdc86070", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-679417950", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c7ff44-db", "ovs_interfaceid": "55c7ff44-dbe4-4808-9547-501e885f456e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2073.761034] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:90:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67fc8868-0816-4461-8ed9-1c34200f5e16', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:45:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55c7ff44-dbe4-4808-9547-501e885f456e', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2073.769191] env[61964]: DEBUG oslo.service.loopingcall [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2073.769835] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63911858-5a79-4479-8c92-46afca980300] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2073.770241] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6490f06-2086-4d42-8bd1-00be41112b49 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.792183] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2073.792183] env[61964]: value = "task-1688674" [ 2073.792183] env[61964]: _type = "Task" [ 2073.792183] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.800387] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688674, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.215114] env[61964]: DEBUG nova.compute.manager [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Received event network-changed-67fc8868-0816-4461-8ed9-1c34200f5e16 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2074.215114] env[61964]: DEBUG nova.compute.manager [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Refreshing instance network info cache due to event network-changed-67fc8868-0816-4461-8ed9-1c34200f5e16. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2074.215114] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Acquiring lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2074.215114] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Acquired lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2074.216096] env[61964]: DEBUG nova.network.neutron [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Refreshing network info cache for port 67fc8868-0816-4461-8ed9-1c34200f5e16 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2074.304226] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688674, 'name': CreateVM_Task, 'duration_secs': 0.33239} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.304226] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63911858-5a79-4479-8c92-46afca980300] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2074.312095] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2074.312095] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2074.312095] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2074.312095] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ea555a8-4d5d-4a79-aa53-42d830659485 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.317118] env[61964]: DEBUG oslo_vmware.api [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for the task: (returnval){ [ 2074.317118] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5249469f-a6f8-f42d-6a00-87744f6d6a6f" [ 2074.317118] env[61964]: _type = "Task" [ 2074.317118] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.326179] env[61964]: DEBUG oslo_vmware.api [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5249469f-a6f8-f42d-6a00-87744f6d6a6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.498055] env[61964]: DEBUG nova.network.neutron [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Updated VIF entry in instance network info cache for port 67fc8868-0816-4461-8ed9-1c34200f5e16. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2074.498055] env[61964]: DEBUG nova.network.neutron [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Updating instance_info_cache with network_info: [{"id": "67fc8868-0816-4461-8ed9-1c34200f5e16", "address": "fa:16:3e:20:90:bd", "network": {"id": "c1c79d63-5676-4e03-a591-cb046d461540", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-617863065", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67fc8868-08", "ovs_interfaceid": "67fc8868-0816-4461-8ed9-1c34200f5e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55c7ff44-dbe4-4808-9547-501e885f456e", "address": "fa:16:3e:82:45:e1", "network": {"id": "427d54ba-0e99-4eb2-b8b7-a25bbdc86070", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-679417950", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c7ff44-db", "ovs_interfaceid": "55c7ff44-dbe4-4808-9547-501e885f456e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2074.511039] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Releasing lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2074.511039] env[61964]: DEBUG nova.compute.manager [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Received event network-vif-plugged-55c7ff44-dbe4-4808-9547-501e885f456e {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2074.511039] env[61964]: DEBUG oslo_concurrency.lockutils 
[req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Acquiring lock "63911858-5a79-4479-8c92-46afca980300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2074.511039] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Lock "63911858-5a79-4479-8c92-46afca980300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2074.511039] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Lock "63911858-5a79-4479-8c92-46afca980300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2074.511039] env[61964]: DEBUG nova.compute.manager [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] No waiting events found dispatching network-vif-plugged-55c7ff44-dbe4-4808-9547-501e885f456e {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2074.511039] env[61964]: WARNING nova.compute.manager [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Received unexpected event network-vif-plugged-55c7ff44-dbe4-4808-9547-501e885f456e for instance with vm_state building and task_state deleting. [ 2074.511039] env[61964]: DEBUG nova.compute.manager [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Received event network-changed-55c7ff44-dbe4-4808-9547-501e885f456e {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2074.511039] env[61964]: DEBUG nova.compute.manager [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Refreshing instance network info cache due to event network-changed-55c7ff44-dbe4-4808-9547-501e885f456e. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2074.511039] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Acquiring lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2074.511039] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Acquired lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2074.511039] env[61964]: DEBUG nova.network.neutron [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Refreshing network info cache for port 55c7ff44-dbe4-4808-9547-501e885f456e {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2074.816197] env[61964]: DEBUG nova.network.neutron [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Updated VIF entry in instance network info cache for port 55c7ff44-dbe4-4808-9547-501e885f456e. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2074.816626] env[61964]: DEBUG nova.network.neutron [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] [instance: 63911858-5a79-4479-8c92-46afca980300] Updating instance_info_cache with network_info: [{"id": "67fc8868-0816-4461-8ed9-1c34200f5e16", "address": "fa:16:3e:20:90:bd", "network": {"id": "c1c79d63-5676-4e03-a591-cb046d461540", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-617863065", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67fc8868-08", "ovs_interfaceid": "67fc8868-0816-4461-8ed9-1c34200f5e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55c7ff44-dbe4-4808-9547-501e885f456e", "address": "fa:16:3e:82:45:e1", "network": {"id": "427d54ba-0e99-4eb2-b8b7-a25bbdc86070", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-679417950", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c7ff44-db", "ovs_interfaceid": "55c7ff44-dbe4-4808-9547-501e885f456e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2074.828197] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2074.828431] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2074.828638] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2074.829044] env[61964]: DEBUG oslo_concurrency.lockutils [req-66382f07-6861-4874-b9cf-de1ea2d255d5 req-ecdaa906-a285-455d-92b1-1063c08ffd40 service nova] Releasing lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2093.427248] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "aacff339-acaa-481d-930f-a4e838525cc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2093.427521] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "aacff339-acaa-481d-930f-a4e838525cc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2093.853116] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3535e3e5-91a2-49fc-845e-24c4ecf2139d tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "2f54f015-36dd-4390-a8a3-afa767581e44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2093.853358] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3535e3e5-91a2-49fc-845e-24c4ecf2139d tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "2f54f015-36dd-4390-a8a3-afa767581e44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2094.122021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-52abaf69-9807-4d95-a935-c5e527c36741 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "79a8f962-d252-4895-9ac3-a2e214fc7d82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2094.122252] env[61964]: DEBUG oslo_concurrency.lockutils [None req-52abaf69-9807-4d95-a935-c5e527c36741 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "79a8f962-d252-4895-9ac3-a2e214fc7d82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2110.352254] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8cae8e77-203d-447b-a354-728dafa9c0c1 tempest-ServerShowV257Test-31655054 tempest-ServerShowV257Test-31655054-project-member] Acquiring lock "97aa9519-8986-49b9-a5d3-5a24968c709b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2110.352601] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8cae8e77-203d-447b-a354-728dafa9c0c1 tempest-ServerShowV257Test-31655054 tempest-ServerShowV257Test-31655054-project-member] Lock "97aa9519-8986-49b9-a5d3-5a24968c709b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2111.045356] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2111.045540] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2111.045666] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 63911858-5a79-4479-8c92-46afca980300] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2111.069022] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2111.069510] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2111.383558] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.311525] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33cd8891-046f-4b6f-ac1c-c4b6356c89fc tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Acquiring lock "469107ae-93b5-467d-94d5-d9c78766a934" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2117.311805] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33cd8891-046f-4b6f-ac1c-c4b6356c89fc tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Lock "469107ae-93b5-467d-94d5-d9c78766a934" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2117.383971] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.384175] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2117.676506] env[61964]: WARNING oslo_vmware.rw_handles [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2117.676506] env[61964]: ERROR oslo_vmware.rw_handles [ 2117.677047] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2117.678788] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2117.678986] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Copying Virtual Disk [datastore1] vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/e9ec2109-e503-4d26-83f7-f27f815d6e6d/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2117.679284] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db6bd153-998b-4e21-967d-1620d9db8ce3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.688581] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 2117.688581] env[61964]: value = "task-1688675" [ 
2117.688581] env[61964]: _type = "Task" [ 2117.688581] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.696495] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': task-1688675, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.199383] env[61964]: DEBUG oslo_vmware.exceptions [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2118.200340] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2118.200340] env[61964]: ERROR nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.200340] env[61964]: Faults: ['InvalidArgument'] [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Traceback (most recent call last): [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] yield resources [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] self.driver.spawn(context, instance, image_meta, [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] self._fetch_image_if_missing(context, vi) [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] image_cache(vi, tmp_image_ds_loc) [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: 
ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] vm_util.copy_virtual_disk( [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] session._wait_for_task(vmdk_copy_task) [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] return self.wait_for_task(task_ref) [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] return evt.wait() [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] result = hub.switch() [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] return self.greenlet.switch() [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] self.f(*self.args, **self.kw) [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] raise exceptions.translate_fault(task_info.error) [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Faults: ['InvalidArgument'] [ 2118.200340] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] [ 2118.201195] env[61964]: INFO nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Terminating instance [ 2118.202358] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2118.202437] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2118.203086] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2118.203284] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2118.203516] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ce8c0ca-3de8-4bd8-9d0a-82a1fa3f78a5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.205819] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e45009-1430-433c-ba3c-399c8cc2cfca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.212965] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2118.213222] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a93469e4-eee5-44ca-bb81-fe72053f4817 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.215340] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2118.215511] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2118.216445] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2219a7af-64a7-4eba-b6b8-a42bc21eb3e9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.221210] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for the task: (returnval){ [ 2118.221210] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f3b1c6-2519-678b-f30d-bc05153ad072" [ 2118.221210] env[61964]: _type = "Task" [ 2118.221210] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.229650] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f3b1c6-2519-678b-f30d-bc05153ad072, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.290083] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2118.290314] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2118.290545] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Deleting the datastore file [datastore1] ac955d73-c844-4b98-b791-7d7c749c6954 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2118.290814] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c967e2d-2e17-48dd-8d90-1ff2c9b262fd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.296653] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 2118.296653] env[61964]: value = "task-1688677" [ 2118.296653] env[61964]: _type = "Task" [ 2118.296653] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.304594] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': task-1688677, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.384634] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.733105] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2118.733373] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Creating directory with path [datastore1] vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2118.733642] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69b6c2fb-9453-46c3-9273-098228bc4531 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.745192] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Created directory with path [datastore1] vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2118.745390] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Fetch image to [datastore1] vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2118.745558] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2118.746319] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b785a22d-0cc1-4280-ae3d-d96efccf36a2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.753012] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa4ffb1-9a71-4700-9cf3-49a153ba8317 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.762083] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b32434-1d86-4901-ba53-6f0bea4807a3 {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.794020] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861b036b-1c79-4b59-a98f-2359803127c1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.800495] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75e621ca-40e3-4276-9f93-a15727832d1f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.806489] env[61964]: DEBUG oslo_vmware.api [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': task-1688677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063963} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.806717] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2118.806887] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2118.807061] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2118.807224] env[61964]: INFO nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Took 0.60 seconds to destroy the instance on the hypervisor. 
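The DeleteDatastoreFile_Task entries above follow the usual vSphere pattern: a "*_Task" invocation returns a task reference immediately, and the client then polls the task (wait_for_task / _poll_task, progress 0% until completion) and turns a server-side fault into a client-side exception, as the translate_fault traceback further down shows. A minimal, standalone sketch of that polling loop, assuming a hypothetical fetch_task_info() helper and TaskFault exception rather than the real oslo.vmware or pyVmomi API:

# Illustration only: fetch_task_info() and TaskFault are hypothetical
# stand-ins for whatever client actually reads vSphere task state.
import time

class TaskFault(Exception):
    """Raised when the polled task finishes in the 'error' state."""

def wait_for_task(fetch_task_info, task_ref, poll_interval=0.5, timeout=60.0):
    """Poll fetch_task_info(task_ref) until the task succeeds or fails.

    fetch_task_info must return a dict with at least:
      {'state': 'queued'|'running'|'success'|'error',
       'progress': int, 'error': str or None}
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info(task_ref)
        state = info['state']
        if state == 'success':
            return info
        if state == 'error':
            # Mirrors the translate_fault step in the traceback below:
            # the fault recorded on the task becomes an exception here.
            raise TaskFault(info.get('error') or 'unknown fault')
        if time.monotonic() > deadline:
            raise TimeoutError(f'task {task_ref} still {state} after {timeout}s')
        time.sleep(poll_interval)

if __name__ == '__main__':
    # Fake task that reports 'running' twice, then succeeds.
    states = iter(['running', 'running', 'success'])
    fake = lambda ref: {'state': next(states), 'progress': 0, 'error': None}
    print(wait_for_task(fake, 'task-1688677', poll_interval=0.0))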
[ 2118.809339] env[61964]: DEBUG nova.compute.claims [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2118.809493] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2118.809691] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2118.825616] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2118.884186] env[61964]: DEBUG oslo_vmware.rw_handles [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2118.946252] env[61964]: DEBUG oslo_vmware.rw_handles [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2118.946447] env[61964]: DEBUG oslo_vmware.rw_handles [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2119.180026] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaf3c25-2648-4a67-bb94-dc29478f5cf2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.188021] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47748b3-2177-46b4-a85c-9f6b82332a6c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.217035] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6114fd7-3060-4a34-9536-7cb3a5131dc8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.224421] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917acb5f-a3ae-4bbb-bdc7-0c71597d0476 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.237912] env[61964]: DEBUG nova.compute.provider_tree [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2119.246314] env[61964]: DEBUG nova.scheduler.client.report [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2119.269368] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.459s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2119.269893] env[61964]: ERROR nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2119.269893] env[61964]: Faults: ['InvalidArgument'] [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Traceback (most recent call last): [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: 
ac955d73-c844-4b98-b791-7d7c749c6954] self.driver.spawn(context, instance, image_meta, [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] self._fetch_image_if_missing(context, vi) [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] image_cache(vi, tmp_image_ds_loc) [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] vm_util.copy_virtual_disk( [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] session._wait_for_task(vmdk_copy_task) [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] return self.wait_for_task(task_ref) [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] return evt.wait() [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] result = hub.switch() [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] return self.greenlet.switch() [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] self.f(*self.args, **self.kw) [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] raise exceptions.translate_fault(task_info.error) [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Faults: ['InvalidArgument'] [ 2119.269893] env[61964]: ERROR nova.compute.manager [instance: ac955d73-c844-4b98-b791-7d7c749c6954] [ 2119.270937] env[61964]: DEBUG nova.compute.utils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2119.273738] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Build of instance ac955d73-c844-4b98-b791-7d7c749c6954 was re-scheduled: A specified parameter was not correct: fileType [ 2119.273738] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2119.274113] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2119.274292] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2119.274474] env[61964]: DEBUG nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2119.274636] env[61964]: DEBUG nova.network.neutron [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2119.383882] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2119.841499] env[61964]: DEBUG nova.network.neutron [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.858416] env[61964]: INFO nova.compute.manager [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Took 0.58 seconds to deallocate network for instance. [ 2119.974066] env[61964]: INFO nova.scheduler.client.report [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Deleted allocations for instance ac955d73-c844-4b98-b791-7d7c749c6954 [ 2119.995409] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7667bf45-869a-4076-9305-6f61480c3b05 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "ac955d73-c844-4b98-b791-7d7c749c6954" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 649.731s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2119.996571] env[61964]: DEBUG oslo_concurrency.lockutils [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "ac955d73-c844-4b98-b791-7d7c749c6954" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 451.461s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2119.997278] env[61964]: DEBUG oslo_concurrency.lockutils [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "ac955d73-c844-4b98-b791-7d7c749c6954-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2119.997546] env[61964]: DEBUG oslo_concurrency.lockutils [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 
tempest-ImagesTestJSON-1538433138-project-member] Lock "ac955d73-c844-4b98-b791-7d7c749c6954-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2119.997725] env[61964]: DEBUG oslo_concurrency.lockutils [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "ac955d73-c844-4b98-b791-7d7c749c6954-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.000032] env[61964]: INFO nova.compute.manager [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Terminating instance [ 2120.001943] env[61964]: DEBUG nova.compute.manager [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2120.002592] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2120.002663] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86c484d9-7003-44b9-9451-6605f3217cbe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.016057] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff042df3-3ded-49f8-87bb-79c7086293d5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.030294] env[61964]: DEBUG nova.compute.manager [None req-355765b5-a69a-4608-886c-0b96d103db89 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] [instance: c68c1fe4-ef07-4bb7-b9be-16e02d0bd855] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2120.059845] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ac955d73-c844-4b98-b791-7d7c749c6954 could not be found. 
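The lockutils entries above serialize work per resource by lock name (the instance UUID, the UUID plus "-events", "compute_resources") and record how long each caller waited for, and then held, the lock. A rough, self-contained approximation of that named-lock bookkeeping using only the standard library, not oslo_concurrency's actual implementation:

# Illustration of the pattern only; real oslo_concurrency locks add
# fair queuing, external (file) locks and semaphore garbage collection.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one process-local lock per name

@contextmanager
def named_lock(name, owner):
    lock = _locks[name]
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1
        lock.release()
        print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')

if __name__ == '__main__':
    with named_lock('ac955d73-c844-4b98-b791-7d7c749c6954-events',
                    'clear_events_for_instance'):
        time.sleep(0.01)   # critical section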
[ 2120.059845] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2120.059845] env[61964]: INFO nova.compute.manager [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2120.059845] env[61964]: DEBUG oslo.service.loopingcall [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2120.059845] env[61964]: DEBUG nova.compute.manager [-] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2120.059845] env[61964]: DEBUG nova.network.neutron [-] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2120.062629] env[61964]: DEBUG nova.compute.manager [None req-355765b5-a69a-4608-886c-0b96d103db89 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] [instance: c68c1fe4-ef07-4bb7-b9be-16e02d0bd855] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2120.094506] env[61964]: DEBUG nova.network.neutron [-] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.096491] env[61964]: DEBUG oslo_concurrency.lockutils [None req-355765b5-a69a-4608-886c-0b96d103db89 tempest-SecurityGroupsTestJSON-2081359831 tempest-SecurityGroupsTestJSON-2081359831-project-member] Lock "c68c1fe4-ef07-4bb7-b9be-16e02d0bd855" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.247s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.106770] env[61964]: INFO nova.compute.manager [-] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] Took 0.05 seconds to deallocate network for instance. [ 2120.108954] env[61964]: DEBUG nova.compute.manager [None req-70b27178-b569-4d03-845f-529b82a14ddb tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] [instance: 0dad08e5-1725-4e1e-98f1-068da1f9edcf] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2120.141222] env[61964]: DEBUG nova.compute.manager [None req-70b27178-b569-4d03-845f-529b82a14ddb tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] [instance: 0dad08e5-1725-4e1e-98f1-068da1f9edcf] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2120.174968] env[61964]: DEBUG oslo_concurrency.lockutils [None req-70b27178-b569-4d03-845f-529b82a14ddb tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] Lock "0dad08e5-1725-4e1e-98f1-068da1f9edcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.322s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.193392] env[61964]: DEBUG nova.compute.manager [None req-dc1bdcd5-d243-4280-9725-4cf31043d27d tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] [instance: fa5eb87f-8546-4e29-b3d2-0e898d113beb] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2120.220715] env[61964]: DEBUG oslo_concurrency.lockutils [None req-62c5b9d1-5a74-4308-8119-af66ac1ad642 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "ac955d73-c844-4b98-b791-7d7c749c6954" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.224s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.221848] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "ac955d73-c844-4b98-b791-7d7c749c6954" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 90.620s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2120.222047] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ac955d73-c844-4b98-b791-7d7c749c6954] During sync_power_state the instance has a pending task (deleting). Skip. [ 2120.222225] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "ac955d73-c844-4b98-b791-7d7c749c6954" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.226505] env[61964]: DEBUG nova.compute.manager [None req-dc1bdcd5-d243-4280-9725-4cf31043d27d tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] [instance: fa5eb87f-8546-4e29-b3d2-0e898d113beb] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2120.249100] env[61964]: DEBUG oslo_concurrency.lockutils [None req-dc1bdcd5-d243-4280-9725-4cf31043d27d tempest-ServerRescueNegativeTestJSON-830775210 tempest-ServerRescueNegativeTestJSON-830775210-project-member] Lock "fa5eb87f-8546-4e29-b3d2-0e898d113beb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.867s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.257616] env[61964]: DEBUG nova.compute.manager [None req-a2c7bf34-e1f1-4f64-a2bd-fd52a4794cbe tempest-ServerShowV254Test-650985770 tempest-ServerShowV254Test-650985770-project-member] [instance: 22d0050d-4654-4e63-ae79-bc209d714635] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2120.281217] env[61964]: DEBUG nova.compute.manager [None req-a2c7bf34-e1f1-4f64-a2bd-fd52a4794cbe tempest-ServerShowV254Test-650985770 tempest-ServerShowV254Test-650985770-project-member] [instance: 22d0050d-4654-4e63-ae79-bc209d714635] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2120.302356] env[61964]: DEBUG oslo_concurrency.lockutils [None req-a2c7bf34-e1f1-4f64-a2bd-fd52a4794cbe tempest-ServerShowV254Test-650985770 tempest-ServerShowV254Test-650985770-project-member] Lock "22d0050d-4654-4e63-ae79-bc209d714635" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.803s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.312364] env[61964]: DEBUG nova.compute.manager [None req-f5210043-641c-4387-9b11-7aa883579e93 tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] [instance: 149b16e3-d4d7-48c8-a7e4-32d869e82615] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2120.341375] env[61964]: DEBUG nova.compute.manager [None req-f5210043-641c-4387-9b11-7aa883579e93 tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] [instance: 149b16e3-d4d7-48c8-a7e4-32d869e82615] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2120.368434] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f5210043-641c-4387-9b11-7aa883579e93 tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Lock "149b16e3-d4d7-48c8-a7e4-32d869e82615" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.627s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.378934] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.384034] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.386568] env[61964]: DEBUG nova.compute.manager [None req-e9299c25-241d-4165-9440-9988c42982b6 tempest-ServersNegativeTestMultiTenantJSON-853977313 tempest-ServersNegativeTestMultiTenantJSON-853977313-project-member] [instance: 720f4e4b-295e-4a1d-af1e-bfa6739844c4] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2120.423995] env[61964]: DEBUG nova.compute.manager [None req-e9299c25-241d-4165-9440-9988c42982b6 tempest-ServersNegativeTestMultiTenantJSON-853977313 tempest-ServersNegativeTestMultiTenantJSON-853977313-project-member] [instance: 720f4e4b-295e-4a1d-af1e-bfa6739844c4] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2120.445979] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e9299c25-241d-4165-9440-9988c42982b6 tempest-ServersNegativeTestMultiTenantJSON-853977313 tempest-ServersNegativeTestMultiTenantJSON-853977313-project-member] Lock "720f4e4b-295e-4a1d-af1e-bfa6739844c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.003s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.456246] env[61964]: DEBUG nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2120.516784] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2120.517044] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2120.519440] env[61964]: INFO nova.compute.claims [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2120.855319] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239141f9-2f43-40b1-b266-1ea601ef135d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.863510] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1922f0e-bf5e-4c49-a176-52395a18a6d0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.892405] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4885f00b-ae0a-4b30-9ece-a53a2d623fba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.899519] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bc6a3e-739a-45a4-ac47-91476e7d0754 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.912403] env[61964]: DEBUG nova.compute.provider_tree [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2120.920971] env[61964]: DEBUG nova.scheduler.client.report [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2120.935767] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.419s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2120.936154] env[61964]: DEBUG nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2120.970021] env[61964]: DEBUG nova.compute.utils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2120.974021] env[61964]: DEBUG nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2120.974021] env[61964]: DEBUG nova.network.neutron [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2120.981986] env[61964]: DEBUG nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2121.024234] env[61964]: INFO nova.virt.block_device [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Booting with volume 91fcd011-4dd0-41e1-9d26-cc0ee3ff9268 at /dev/sda [ 2121.080622] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25cbf135-8237-40b3-80a4-cf312632b8db {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.089686] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60fb862-9d60-445d-b60c-b85ad417d99c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.101193] env[61964]: DEBUG nova.policy [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40b4b78b159542498d86986f20b754b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96a451dfd354bc79cb23d0af9a92136', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2121.120138] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3047483-c864-4541-8206-b5fe20083a94 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.130217] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170db4d7-4ba7-46c3-bc61-ea87ef0cd403 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.155986] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95859854-4c6e-45a6-8c51-29dec4fb3437 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.165047] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff30171-eff8-49ec-a047-04e1c43cb856 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.179183] env[61964]: DEBUG nova.virt.block_device [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updating existing volume attachment record: 88a0110a-9686-41b8-9048-8983295613dd {{(pid=61964) _volume_attach /opt/stack/nova/nova/virt/block_device.py:631}} [ 2121.401671] env[61964]: DEBUG nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2121.402355] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2121.402580] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2121.402757] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2121.403111] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2121.403325] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2121.403606] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2121.403881] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2121.404095] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2121.404309] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 
tempest-ServersTestBootFromVolume-2044776882-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2121.404619] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2121.404856] env[61964]: DEBUG nova.virt.hardware [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2121.406064] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c741b7-5d60-4ee2-8e56-ca3b100876e8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.416134] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ead4545-f0af-46ec-a8b9-a23e06394594 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.974118] env[61964]: DEBUG nova.network.neutron [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Successfully created port: 7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2122.758647] env[61964]: DEBUG nova.compute.manager [req-c99733a3-243e-42ce-90e7-4c29e665f169 req-891e6121-3ee1-45a7-9310-ebc6f4202f50 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Received event network-vif-plugged-7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2122.758921] env[61964]: DEBUG oslo_concurrency.lockutils [req-c99733a3-243e-42ce-90e7-4c29e665f169 req-891e6121-3ee1-45a7-9310-ebc6f4202f50 service nova] Acquiring lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2122.759148] env[61964]: DEBUG oslo_concurrency.lockutils [req-c99733a3-243e-42ce-90e7-4c29e665f169 req-891e6121-3ee1-45a7-9310-ebc6f4202f50 service nova] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2122.759317] env[61964]: DEBUG oslo_concurrency.lockutils [req-c99733a3-243e-42ce-90e7-4c29e665f169 req-891e6121-3ee1-45a7-9310-ebc6f4202f50 service nova] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2122.759481] env[61964]: DEBUG nova.compute.manager [req-c99733a3-243e-42ce-90e7-4c29e665f169 
req-891e6121-3ee1-45a7-9310-ebc6f4202f50 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] No waiting events found dispatching network-vif-plugged-7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2122.759645] env[61964]: WARNING nova.compute.manager [req-c99733a3-243e-42ce-90e7-4c29e665f169 req-891e6121-3ee1-45a7-9310-ebc6f4202f50 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Received unexpected event network-vif-plugged-7e11c525-23ee-43ff-a4ff-11c2a1341fbd for instance with vm_state building and task_state spawning. [ 2122.830341] env[61964]: DEBUG nova.network.neutron [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Successfully updated port: 7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2122.846052] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Acquiring lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2122.846052] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Acquired lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2122.846052] env[61964]: DEBUG nova.network.neutron [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2122.896785] env[61964]: DEBUG nova.network.neutron [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2123.063019] env[61964]: DEBUG nova.network.neutron [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updating instance_info_cache with network_info: [{"id": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "address": "fa:16:3e:4a:a1:a7", "network": {"id": "ff75fe0a-4ef8-41a3-a35f-ccd35c3f657e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1881349418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96a451dfd354bc79cb23d0af9a92136", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e11c525-23", "ovs_interfaceid": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.079312] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Releasing lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2123.079609] env[61964]: DEBUG nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Instance network_info: |[{"id": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "address": "fa:16:3e:4a:a1:a7", "network": {"id": "ff75fe0a-4ef8-41a3-a35f-ccd35c3f657e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1881349418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96a451dfd354bc79cb23d0af9a92136", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e11c525-23", "ovs_interfaceid": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2123.083237] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:a1:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7041d198-66a3-40de-bf7d-cfc036e6ed69', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e11c525-23ee-43ff-a4ff-11c2a1341fbd', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2123.089288] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Creating folder: Project (e96a451dfd354bc79cb23d0af9a92136). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2123.089833] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6a540b9-0e6c-488a-8604-2724cd4caf15 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.103979] env[61964]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2123.104154] env[61964]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61964) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2123.104684] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Folder already exists: Project (e96a451dfd354bc79cb23d0af9a92136). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2123.104878] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Creating folder: Instances. Parent ref: group-v352003. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2123.105122] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d90dab9c-d3c0-4b2a-8ae2-34cb57fb19a5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.114679] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Created folder: Instances in parent group-v352003. [ 2123.114907] env[61964]: DEBUG oslo.service.loopingcall [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2123.115098] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2123.115293] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16b95cea-fa21-4dec-a1d7-e93f8b39fb21 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.136439] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2123.136439] env[61964]: value = "task-1688680" [ 2123.136439] env[61964]: _type = "Task" [ 2123.136439] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.144628] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688680, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.383602] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.395748] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2123.396430] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2123.396430] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2123.396430] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2123.398200] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6490e145-7201-4b6d-9da8-889f7037069f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.410838] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c7e3fc-546f-4521-a40b-dac751e75c67 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.424231] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcb8473-c2a7-4999-b32b-6458b5c5f708 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.431474] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272a158c-7632-4580-a4c5-744660326810 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.464075] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181324MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2123.464354] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2123.464428] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2123.558435] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.558861] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.558861] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.558982] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.559127] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.559174] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.559273] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.559391] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.559484] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.559632] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c7caa492-efdd-462a-9bc3-9f19d6b0f7a8 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2123.573184] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.585215] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.595575] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.605974] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.616231] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance d0cd87ae-53f6-4b03-9b49-b84b34cea243 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.626250] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance abb43b76-b3df-4d4a-b1f4-801306d0f01f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.636441] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.647738] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688680, 'name': CreateVM_Task, 'duration_secs': 0.344253} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.648361] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2f54f015-36dd-4390-a8a3-afa767581e44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.649421] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2123.650401] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'device_type': None, 'boot_index': 0, 'guest_format': None, 'mount_device': '/dev/sda', 'delete_on_termination': True, 'attachment_id': '88a0110a-9686-41b8-9048-8983295613dd', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352006', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'name': 'volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c7caa492-efdd-462a-9bc3-9f19d6b0f7a8', 'attached_at': '', 'detached_at': '', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'serial': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268'}, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=61964) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2123.650687] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Root volume attach. Driver type: vmdk {{(pid=61964) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2123.651485] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a905d0c-6439-4192-acc2-90877b58ef45 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.661099] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 79a8f962-d252-4895-9ac3-a2e214fc7d82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.662681] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d3f749-0dcd-4190-86f1-6d5c579c87b8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.669942] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1f08dd-5518-4c41-9a8a-4478a6daf5f3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.673836] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 97aa9519-8986-49b9-a5d3-5a24968c709b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.679473] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-e6576694-f397-4db5-a4a5-b3f5e046f26e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.686291] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2123.686291] env[61964]: value = "task-1688681" [ 2123.686291] env[61964]: _type = "Task" [ 2123.686291] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.687522] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 469107ae-93b5-467d-94d5-d9c78766a934 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2123.687796] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2123.687966] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2123.699233] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688681, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.992952] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad164b57-e9ea-4246-adaf-1de1f12cf69e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.000677] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68dac0ff-f308-4d0d-9c95-660301ed072f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.031128] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73f8d46-7682-4c1a-a85e-9f513af417c6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.038681] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30b1fe3-46e8-444a-a44a-bf3c2fd57b63 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.051716] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2124.059844] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2124.074911] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2124.074911] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.610s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2124.197912] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688681, 'name': RelocateVM_Task, 'duration_secs': 0.376002} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.198348] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Volume attach. 
Driver type: vmdk {{(pid=61964) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2124.198559] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352006', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'name': 'volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c7caa492-efdd-462a-9bc3-9f19d6b0f7a8', 'attached_at': '', 'detached_at': '', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'serial': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268'} {{(pid=61964) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2124.199343] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d49f05-03c2-4d7d-80ea-46fb03c5bfc4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.217662] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12a6d89-0063-4317-9f18-cdb4a44deee9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.940022] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268/volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268.vmdk or device None with type thin {{(pid=61964) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2124.940022] env[61964]: DEBUG nova.compute.manager [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Received event network-changed-7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2124.940022] env[61964]: DEBUG nova.compute.manager [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Refreshing instance network info cache due to event network-changed-7e11c525-23ee-43ff-a4ff-11c2a1341fbd. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2124.940022] env[61964]: DEBUG oslo_concurrency.lockutils [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] Acquiring lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2124.940022] env[61964]: DEBUG oslo_concurrency.lockutils [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] Acquired lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2124.940022] env[61964]: DEBUG nova.network.neutron [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Refreshing network info cache for port 7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2124.940381] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f693470f-f1c3-4cb2-814d-71f120cc9f7c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.962705] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2124.962705] env[61964]: value = "task-1688682" [ 2124.962705] env[61964]: _type = "Task" [ 2124.962705] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.971353] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.220932] env[61964]: DEBUG nova.network.neutron [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updated VIF entry in instance network info cache for port 7e11c525-23ee-43ff-a4ff-11c2a1341fbd. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2125.221288] env[61964]: DEBUG nova.network.neutron [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updating instance_info_cache with network_info: [{"id": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "address": "fa:16:3e:4a:a1:a7", "network": {"id": "ff75fe0a-4ef8-41a3-a35f-ccd35c3f657e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1881349418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96a451dfd354bc79cb23d0af9a92136", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e11c525-23", "ovs_interfaceid": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.230918] env[61964]: DEBUG oslo_concurrency.lockutils [req-f9306a7b-e84b-45d4-94a3-4ab291754904 req-88d01202-7560-4673-93ce-0108344a74ae service nova] Releasing lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2125.473175] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688682, 'name': ReconfigVM_Task, 'duration_secs': 0.282622} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.473429] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268/volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268.vmdk or device None with type thin {{(pid=61964) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2125.478189] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8413359c-e9b5-4df6-869d-da448d6f5f29 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.492757] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2125.492757] env[61964]: value = "task-1688683" [ 2125.492757] env[61964]: _type = "Task" [ 2125.492757] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.502414] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688683, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.002560] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688683, 'name': ReconfigVM_Task, 'duration_secs': 0.116212} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.002872] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352006', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'name': 'volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c7caa492-efdd-462a-9bc3-9f19d6b0f7a8', 'attached_at': '', 'detached_at': '', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'serial': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268'} {{(pid=61964) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2126.003545] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ff5bfbd-c17e-4727-8147-a134a10682d7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.009918] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2126.009918] env[61964]: value = "task-1688684" [ 2126.009918] env[61964]: _type = "Task" [ 2126.009918] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.018019] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688684, 'name': Rename_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.070861] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.519113] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688684, 'name': Rename_Task, 'duration_secs': 0.127228} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.519488] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Powering on the VM {{(pid=61964) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 2126.519595] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c89a89a4-344f-4970-b24b-b584e20b2d87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.526104] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2126.526104] env[61964]: value = "task-1688685" [ 2126.526104] env[61964]: _type = "Task" [ 2126.526104] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.533126] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688685, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.036320] env[61964]: DEBUG oslo_vmware.api [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688685, 'name': PowerOnVM_Task, 'duration_secs': 0.461002} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.036579] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Powered on the VM {{(pid=61964) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 2127.036781] env[61964]: INFO nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Took 5.63 seconds to spawn the instance on the hypervisor. 
[ 2127.037035] env[61964]: DEBUG nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Checking state {{(pid=61964) _get_power_state /opt/stack/nova/nova/compute/manager.py:1766}} [ 2127.037785] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050426a2-5d76-4bf3-ad0b-e3344f36fed0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.089165] env[61964]: INFO nova.compute.manager [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Took 6.59 seconds to build instance. [ 2127.102476] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d74c52cd-71ef-4f41-9dcd-cedb882dbf04 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 173.864s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2127.112182] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2127.167021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2127.167021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2127.167859] env[61964]: INFO nova.compute.claims [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2127.451760] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82d22d7-be3f-4140-8154-ea46ef0e03a8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.459431] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18df6fe5-65a2-49b0-a993-f431a1f4b4e6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.488830] 
env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6029392b-1cce-439e-8712-0e3ec6201761 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.495951] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36aeeac6-3060-4087-9974-2490a328fb55 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.508971] env[61964]: DEBUG nova.compute.provider_tree [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2127.517418] env[61964]: DEBUG nova.scheduler.client.report [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2127.530881] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.364s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2127.531349] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2127.562092] env[61964]: DEBUG nova.compute.utils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2127.563713] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Allocating IP information in the background. 
{{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2127.563830] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2127.572065] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2127.636133] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2127.639288] env[61964]: DEBUG nova.policy [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '099728a168f0480d982589c508e67704', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bdaa1d5307545ea9ec5cc94ae276387', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2127.660102] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2127.660336] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2127.660495] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 
tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2127.660799] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2127.660953] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2127.661114] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2127.661318] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2127.661473] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2127.661635] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2127.661792] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2127.661959] env[61964]: DEBUG nova.virt.hardware [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2127.662790] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b65db2-c498-4c5c-89eb-694db4ca76fb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.670350] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bc8fd2-17f2-4390-9ea8-65588aff085e {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.056486] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Successfully created port: b9f5a068-9b7a-4158-91b2-0ab7a17a1811 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2128.719205] env[61964]: DEBUG nova.compute.manager [req-c883cc8e-755c-4f36-8eb9-69fa7da0e949 req-3b977355-f736-4d5d-bf8a-3fa1c960121a service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Received event network-vif-plugged-b9f5a068-9b7a-4158-91b2-0ab7a17a1811 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2128.719205] env[61964]: DEBUG oslo_concurrency.lockutils [req-c883cc8e-755c-4f36-8eb9-69fa7da0e949 req-3b977355-f736-4d5d-bf8a-3fa1c960121a service nova] Acquiring lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2128.719205] env[61964]: DEBUG oslo_concurrency.lockutils [req-c883cc8e-755c-4f36-8eb9-69fa7da0e949 req-3b977355-f736-4d5d-bf8a-3fa1c960121a service nova] Lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2128.719205] env[61964]: DEBUG oslo_concurrency.lockutils [req-c883cc8e-755c-4f36-8eb9-69fa7da0e949 req-3b977355-f736-4d5d-bf8a-3fa1c960121a service nova] Lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2128.719205] env[61964]: DEBUG nova.compute.manager [req-c883cc8e-755c-4f36-8eb9-69fa7da0e949 req-3b977355-f736-4d5d-bf8a-3fa1c960121a service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] No waiting events found dispatching network-vif-plugged-b9f5a068-9b7a-4158-91b2-0ab7a17a1811 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2128.719205] env[61964]: WARNING nova.compute.manager [req-c883cc8e-755c-4f36-8eb9-69fa7da0e949 req-3b977355-f736-4d5d-bf8a-3fa1c960121a service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Received unexpected event network-vif-plugged-b9f5a068-9b7a-4158-91b2-0ab7a17a1811 for instance with vm_state building and task_state spawning. 
[ 2128.797766] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Successfully updated port: b9f5a068-9b7a-4158-91b2-0ab7a17a1811 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2128.810703] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "refresh_cache-c91a6c73-d161-488e-a27d-08c1ab3e3e80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2128.810703] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquired lock "refresh_cache-c91a6c73-d161-488e-a27d-08c1ab3e3e80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2128.810703] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2128.856529] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2129.025995] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Updating instance_info_cache with network_info: [{"id": "b9f5a068-9b7a-4158-91b2-0ab7a17a1811", "address": "fa:16:3e:90:64:9f", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9f5a068-9b", "ovs_interfaceid": "b9f5a068-9b7a-4158-91b2-0ab7a17a1811", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.043167] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Releasing lock "refresh_cache-c91a6c73-d161-488e-a27d-08c1ab3e3e80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2129.043842] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Instance network_info: |[{"id": "b9f5a068-9b7a-4158-91b2-0ab7a17a1811", "address": "fa:16:3e:90:64:9f", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9f5a068-9b", "ovs_interfaceid": "b9f5a068-9b7a-4158-91b2-0ab7a17a1811", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 
2129.046024] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:64:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9f5a068-9b7a-4158-91b2-0ab7a17a1811', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2129.053348] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Creating folder: Project (9bdaa1d5307545ea9ec5cc94ae276387). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2129.054344] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bc2f14e-716e-4e97-ab0f-47bc9c0a26f8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.068018] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Created folder: Project (9bdaa1d5307545ea9ec5cc94ae276387) in parent group-v351942. [ 2129.068018] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Creating folder: Instances. Parent ref: group-v352014. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2129.068018] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c75067e-1cbb-4582-8592-e2f70c339b03 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.079026] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Created folder: Instances in parent group-v352014. [ 2129.079026] env[61964]: DEBUG oslo.service.loopingcall [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2129.079026] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2129.079026] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82790b4e-ed82-4e5d-ac62-cfe480c290cd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.098682] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2129.098682] env[61964]: value = "task-1688688" [ 2129.098682] env[61964]: _type = "Task" [ 2129.098682] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.107747] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688688, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.274515] env[61964]: DEBUG nova.compute.manager [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Received event network-changed-7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2129.274716] env[61964]: DEBUG nova.compute.manager [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Refreshing instance network info cache due to event network-changed-7e11c525-23ee-43ff-a4ff-11c2a1341fbd. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2129.274931] env[61964]: DEBUG oslo_concurrency.lockutils [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] Acquiring lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2129.275352] env[61964]: DEBUG oslo_concurrency.lockutils [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] Acquired lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2129.275920] env[61964]: DEBUG nova.network.neutron [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Refreshing network info cache for port 7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2129.608512] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688688, 'name': CreateVM_Task, 'duration_secs': 0.359392} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.608743] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2129.609390] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2129.609583] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2129.609849] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2129.610133] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b91fb268-5489-48cf-af3d-d967e8ef01bd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.614790] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Waiting for the task: (returnval){ [ 2129.614790] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526f553a-3cc3-b005-fb34-e61ab072b0c3" [ 2129.614790] env[61964]: _type = "Task" [ 2129.614790] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.624721] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526f553a-3cc3-b005-fb34-e61ab072b0c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.864177] env[61964]: DEBUG nova.network.neutron [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updated VIF entry in instance network info cache for port 7e11c525-23ee-43ff-a4ff-11c2a1341fbd. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2129.864566] env[61964]: DEBUG nova.network.neutron [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updating instance_info_cache with network_info: [{"id": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "address": "fa:16:3e:4a:a1:a7", "network": {"id": "ff75fe0a-4ef8-41a3-a35f-ccd35c3f657e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1881349418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96a451dfd354bc79cb23d0af9a92136", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e11c525-23", "ovs_interfaceid": "7e11c525-23ee-43ff-a4ff-11c2a1341fbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.875293] env[61964]: DEBUG oslo_concurrency.lockutils [req-11670cfb-62d1-40c4-802a-cc90a3675e68 req-26479faf-b019-408f-b714-8f7a18463c96 service nova] Releasing lock "refresh_cache-c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2130.126081] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2130.126081] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2130.126081] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2130.749107] env[61964]: DEBUG nova.compute.manager [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Received event 
network-changed-b9f5a068-9b7a-4158-91b2-0ab7a17a1811 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2130.749230] env[61964]: DEBUG nova.compute.manager [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Refreshing instance network info cache due to event network-changed-b9f5a068-9b7a-4158-91b2-0ab7a17a1811. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2130.749433] env[61964]: DEBUG oslo_concurrency.lockutils [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] Acquiring lock "refresh_cache-c91a6c73-d161-488e-a27d-08c1ab3e3e80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2130.749574] env[61964]: DEBUG oslo_concurrency.lockutils [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] Acquired lock "refresh_cache-c91a6c73-d161-488e-a27d-08c1ab3e3e80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2130.749728] env[61964]: DEBUG nova.network.neutron [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Refreshing network info cache for port b9f5a068-9b7a-4158-91b2-0ab7a17a1811 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2131.024032] env[61964]: DEBUG nova.network.neutron [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Updated VIF entry in instance network info cache for port b9f5a068-9b7a-4158-91b2-0ab7a17a1811. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2131.024396] env[61964]: DEBUG nova.network.neutron [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Updating instance_info_cache with network_info: [{"id": "b9f5a068-9b7a-4158-91b2-0ab7a17a1811", "address": "fa:16:3e:90:64:9f", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9f5a068-9b", "ovs_interfaceid": "b9f5a068-9b7a-4158-91b2-0ab7a17a1811", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.034060] env[61964]: DEBUG oslo_concurrency.lockutils [req-cf767515-45be-420b-a63a-ce673c0100ff req-ee49d0d1-d074-46cd-8c41-03dd92b752f9 service nova] Releasing lock "refresh_cache-c91a6c73-d161-488e-a27d-08c1ab3e3e80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2146.463802] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Acquiring lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2146.464105] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2146.464570] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Acquiring lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2146.464767] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2146.464949] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2146.467232] env[61964]: INFO nova.compute.manager [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Terminating instance [ 2146.469048] env[61964]: DEBUG nova.compute.manager [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2146.469253] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Powering off the VM {{(pid=61964) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 2146.469863] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00203cc7-06e3-4457-9a39-ecb9245d15a2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.477242] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2146.477242] env[61964]: value = "task-1688689" [ 2146.477242] env[61964]: _type = "Task" [ 2146.477242] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.485542] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688689, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.986691] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688689, 'name': PowerOffVM_Task, 'duration_secs': 0.177748} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.986960] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Powered off the VM {{(pid=61964) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 2146.987166] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Volume detach. Driver type: vmdk {{(pid=61964) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2146.987353] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352006', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'name': 'volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c7caa492-efdd-462a-9bc3-9f19d6b0f7a8', 'attached_at': '', 'detached_at': '', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'serial': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268'} {{(pid=61964) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2146.988077] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7f81d3-bbe9-4c02-a969-ee7e31b3701f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.005890] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92de4da9-6e22-429c-9cfc-37d3d5ea39ba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.012139] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0989b9-579a-4829-96c3-561674b80384 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.030284] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ac6fef-3d5b-4ce6-99cd-f57b9eeaeef4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.044996] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] The volume has not been displaced from its original location: [datastore1] volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268/volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268.vmdk. No consolidation needed. 
{{(pid=61964) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2147.049705] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Reconfiguring VM instance instance-00000043 to detach disk 2000 {{(pid=61964) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2147.049965] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45f55fe7-9fac-4252-9558-c1caf093e39a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.067353] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2147.067353] env[61964]: value = "task-1688690" [ 2147.067353] env[61964]: _type = "Task" [ 2147.067353] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.075203] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688690, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.577292] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688690, 'name': ReconfigVM_Task, 'duration_secs': 0.142625} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.577608] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Reconfigured VM instance instance-00000043 to detach disk 2000 {{(pid=61964) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2147.583589] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e330494-6cdc-4da5-850f-e4fb33c3b429 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.600028] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2147.600028] env[61964]: value = "task-1688691" [ 2147.600028] env[61964]: _type = "Task" [ 2147.600028] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.607925] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688691, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.108966] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688691, 'name': ReconfigVM_Task, 'duration_secs': 0.135737} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.109314] env[61964]: DEBUG nova.virt.vmwareapi.volumeops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352006', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'name': 'volume-91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c7caa492-efdd-462a-9bc3-9f19d6b0f7a8', 'attached_at': '', 'detached_at': '', 'volume_id': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268', 'serial': '91fcd011-4dd0-41e1-9d26-cc0ee3ff9268'} {{(pid=61964) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2148.109588] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2148.110344] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046ec7f6-2c7b-43a2-be79-884bbe1ef12a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.116894] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2148.117123] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a40d9ce5-0c86-4e47-8d1b-dbb94dfa36fd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.197434] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2148.197651] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2148.197829] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 
tempest-ServersTestBootFromVolume-2044776882-project-member] Deleting the datastore file [datastore1] c7caa492-efdd-462a-9bc3-9f19d6b0f7a8 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2148.198098] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-543d47e7-85eb-436b-ba3c-ca2b50ab4674 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.204339] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for the task: (returnval){ [ 2148.204339] env[61964]: value = "task-1688693" [ 2148.204339] env[61964]: _type = "Task" [ 2148.204339] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.212232] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688693, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.714089] env[61964]: DEBUG oslo_vmware.api [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Task: {'id': task-1688693, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074524} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.714362] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2148.714540] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2148.714782] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2148.714879] env[61964]: INFO nova.compute.manager [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Took 2.25 seconds to destroy the instance on the hypervisor. [ 2148.715140] env[61964]: DEBUG oslo.service.loopingcall [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2148.715329] env[61964]: DEBUG nova.compute.manager [-] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2148.715423] env[61964]: DEBUG nova.network.neutron [-] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2149.300100] env[61964]: DEBUG nova.network.neutron [-] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2149.316348] env[61964]: INFO nova.compute.manager [-] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Took 0.60 seconds to deallocate network for instance. [ 2149.320834] env[61964]: DEBUG nova.compute.manager [req-a4a994f8-ba8d-49db-bd41-b391f422d26c req-97b9ef6a-3bfe-4591-b39b-c6e26b3b203a service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Received event network-vif-deleted-7e11c525-23ee-43ff-a4ff-11c2a1341fbd {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2149.321013] env[61964]: INFO nova.compute.manager [req-a4a994f8-ba8d-49db-bd41-b391f422d26c req-97b9ef6a-3bfe-4591-b39b-c6e26b3b203a service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Neutron deleted interface 7e11c525-23ee-43ff-a4ff-11c2a1341fbd; detaching it from the instance and deleting it from the info cache [ 2149.321947] env[61964]: DEBUG nova.network.neutron [req-a4a994f8-ba8d-49db-bd41-b391f422d26c req-97b9ef6a-3bfe-4591-b39b-c6e26b3b203a service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2149.337317] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b03ad92e-0ad9-46cd-aca5-abac3d07ab4d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.357062] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049a3451-337c-43d4-ae1e-035a284e2893 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.390668] env[61964]: DEBUG nova.compute.manager [req-a4a994f8-ba8d-49db-bd41-b391f422d26c req-97b9ef6a-3bfe-4591-b39b-c6e26b3b203a service nova] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Detach interface failed, port_id=7e11c525-23ee-43ff-a4ff-11c2a1341fbd, reason: Instance c7caa492-efdd-462a-9bc3-9f19d6b0f7a8 could not be found. {{(pid=61964) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10935}} [ 2149.404722] env[61964]: INFO nova.compute.manager [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Took 0.09 seconds to detach 1 volumes for instance. 
[ 2149.407097] env[61964]: DEBUG nova.compute.manager [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Deleting volume: 91fcd011-4dd0-41e1-9d26-cc0ee3ff9268 {{(pid=61964) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3221}} [ 2149.515860] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2149.516207] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2149.516459] env[61964]: DEBUG nova.objects.instance [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lazy-loading 'resources' on Instance uuid c7caa492-efdd-462a-9bc3-9f19d6b0f7a8 {{(pid=61964) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1152}} [ 2149.842150] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb72894-ee5b-4255-bdb0-bc2726d682e0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.854816] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8805aa6f-fa8a-4b40-91fa-7eab84616061 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.885542] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0270feb9-66fd-49a6-9a49-a2e25d6a1136 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.893296] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4476091-cf6e-4ae6-a6b9-0ae19716ddd3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.908617] env[61964]: DEBUG nova.compute.provider_tree [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2149.919442] env[61964]: DEBUG nova.scheduler.client.report [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2149.936898] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.421s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2149.960319] env[61964]: INFO nova.scheduler.client.report [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Deleted allocations for instance c7caa492-efdd-462a-9bc3-9f19d6b0f7a8 [ 2150.010177] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d02365d2-51ad-472d-866a-3c533aebbad8 tempest-ServersTestBootFromVolume-2044776882 tempest-ServersTestBootFromVolume-2044776882-project-member] Lock "c7caa492-efdd-462a-9bc3-9f19d6b0f7a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.546s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2167.179087] env[61964]: WARNING oslo_vmware.rw_handles [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2167.179087] env[61964]: ERROR oslo_vmware.rw_handles [ 2167.179087] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2167.181292] env[61964]: DEBUG 
nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2167.181546] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Copying Virtual Disk [datastore1] vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/b1703071-754d-4573-a534-10c23e250c43/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2167.181863] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81d6396f-23d7-4af9-b7b8-bf75711a8849 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.190918] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for the task: (returnval){ [ 2167.190918] env[61964]: value = "task-1688695" [ 2167.190918] env[61964]: _type = "Task" [ 2167.190918] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.199373] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': task-1688695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.701371] env[61964]: DEBUG oslo_vmware.exceptions [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2167.701657] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2167.702243] env[61964]: ERROR nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.702243] env[61964]: Faults: ['InvalidArgument'] [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Traceback (most recent call last): [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] yield resources [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self.driver.spawn(context, instance, image_meta, [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self._fetch_image_if_missing(context, vi) [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] image_cache(vi, tmp_image_ds_loc) [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] vm_util.copy_virtual_disk( [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] session._wait_for_task(vmdk_copy_task) [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] return self.wait_for_task(task_ref) [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] return evt.wait() [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] result = hub.switch() [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] return self.greenlet.switch() [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self.f(*self.args, **self.kw) [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] raise exceptions.translate_fault(task_info.error) [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Faults: ['InvalidArgument'] [ 2167.702243] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] [ 2167.704265] env[61964]: INFO nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Terminating instance [ 2167.704265] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2167.704454] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.704547] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d453b53e-09a2-4e01-bdac-0c7740b66614 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.706984] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2167.707175] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2167.708079] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0894b41c-cb9d-4c1f-a733-fc8821db73cb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.715640] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2167.715895] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-363cfbf0-69cd-4cfb-b1e7-dcc902cbfe57 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.718490] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.718625] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2167.719718] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8b799d6-1435-4813-906f-95d4530e931e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.724756] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Waiting for the task: (returnval){ [ 2167.724756] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526652cd-ad28-08c8-97cc-474130291f3a" [ 2167.724756] env[61964]: _type = "Task" [ 2167.724756] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.732297] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526652cd-ad28-08c8-97cc-474130291f3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.860759] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2167.861092] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2167.861292] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Deleting the datastore file [datastore1] ae85ff01-5625-453d-9dcf-c8417fbb6e0c {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2167.861592] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dee62699-3291-425b-a6ba-636a7f67dd38 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.868343] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for the task: (returnval){ [ 2167.868343] env[61964]: value = "task-1688697" [ 2167.868343] env[61964]: _type = "Task" [ 2167.868343] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.876264] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': task-1688697, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.235767] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2168.236046] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Creating directory with path [datastore1] vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2168.236284] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20c8f331-3593-4696-8fbf-534cc43ad368 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.248917] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Created directory with path [datastore1] vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2168.248917] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Fetch image to [datastore1] vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2168.249134] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2168.249795] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cdde30-7733-4791-b7ad-7dda1e9e7cad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.257031] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21c9bdc-23cb-4563-9bf0-5309bcfdf852 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.266524] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2ea788-03ab-4a81-8790-1c1ea27ff48f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.298246] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403edb74-50c6-4b31-af55-80d12dd4cd54 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.304947] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9df0ef9b-7b59-4d14-a02a-fb5fde7fb6a9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.331370] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2168.379118] env[61964]: DEBUG oslo_vmware.api [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': task-1688697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084727} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.379624] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2168.379624] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2168.379624] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2168.379752] env[61964]: INFO nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Took 0.67 seconds to destroy the instance on the hypervisor. 
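The exchange just above — FileManager.DeleteDatastoreFile_Task is submitted, wait_for_task blocks, _poll_task reports progress until the task either completes (duration_secs is then logged) or raises via translate_fault, as it did earlier with the fileType / InvalidArgument VimFaultException — is the usual vSphere task-polling pattern. The sketch below shows that poll loop in plain Python for illustration only; get_task_info and TaskFault are hypothetical stand-ins, not oslo.vmware APIs (the driver itself delegates this to oslo_vmware.api.VMwareAPISession.wait_for_task).

```python
import time


class TaskFault(Exception):
    """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, message, faults):
        super().__init__(message)
        self.faults = faults


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vSphere-style task until it reaches a terminal state.

    get_task_info is a hypothetical callable returning an object with
    .state ('queued' / 'running' / 'success' / 'error'), .progress, and
    .error (which carries .message and .faults), mirroring what the
    trace above shows _poll_task doing on each iteration.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info  # caller can then log e.g. duration_secs
        if info.state == "error":
            # Mirrors: raise exceptions.translate_fault(task_info.error)
            raise TaskFault(info.error.message, info.error.faults)
        # Task still queued or running: report progress and retry.
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```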
[ 2168.381944] env[61964]: DEBUG nova.compute.claims [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2168.382062] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2168.382294] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2168.502972] env[61964]: DEBUG oslo_vmware.rw_handles [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2168.565225] env[61964]: DEBUG oslo_vmware.rw_handles [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2168.565426] env[61964]: DEBUG oslo_vmware.rw_handles [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2168.753680] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7250ac7e-e93d-4b73-b4f1-28358a76b056 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.761804] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66648196-1a30-4d10-b49d-3e92e107372f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.793064] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8940c287-f7ee-4649-b7ac-475bcfed1cb9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.800608] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237ea095-fb6b-48b6-88d5-51276a306e05 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.815027] env[61964]: DEBUG nova.compute.provider_tree [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.823046] env[61964]: DEBUG nova.scheduler.client.report [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2168.837180] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.455s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2168.837728] env[61964]: ERROR nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.837728] env[61964]: Faults: ['InvalidArgument'] [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Traceback (most recent call last): [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2168.837728] env[61964]: ERROR nova.compute.manager 
[instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self.driver.spawn(context, instance, image_meta, [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self._fetch_image_if_missing(context, vi) [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] image_cache(vi, tmp_image_ds_loc) [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] vm_util.copy_virtual_disk( [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] session._wait_for_task(vmdk_copy_task) [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] return self.wait_for_task(task_ref) [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] return evt.wait() [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] result = hub.switch() [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] return self.greenlet.switch() [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] self.f(*self.args, **self.kw) [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] raise exceptions.translate_fault(task_info.error) [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Faults: ['InvalidArgument'] [ 2168.837728] env[61964]: ERROR nova.compute.manager [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] [ 2168.838606] env[61964]: DEBUG nova.compute.utils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2168.840069] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Build of instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c was re-scheduled: A specified parameter was not correct: fileType [ 2168.840069] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2168.840462] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2168.840630] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2168.840797] env[61964]: DEBUG nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2168.840961] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2169.733169] env[61964]: DEBUG nova.network.neutron [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.744439] env[61964]: INFO nova.compute.manager [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Took 0.90 seconds to deallocate network for instance. [ 2169.852513] env[61964]: INFO nova.scheduler.client.report [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Deleted allocations for instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c [ 2169.876040] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8943a3aa-9aba-4e7e-bac0-73febfaaf7a4 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 692.684s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2169.877269] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 493.122s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2169.877539] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2169.877786] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2169.877961] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2169.880515] env[61964]: INFO nova.compute.manager [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Terminating instance [ 2169.882415] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2169.882620] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2169.882736] env[61964]: DEBUG nova.network.neutron [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2169.889050] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2169.909948] env[61964]: DEBUG nova.network.neutron [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2169.935145] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2169.935415] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2169.937773] env[61964]: INFO nova.compute.claims [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2170.037125] env[61964]: DEBUG nova.network.neutron [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2170.045838] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "refresh_cache-ae85ff01-5625-453d-9dcf-c8417fbb6e0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2170.046234] env[61964]: DEBUG nova.compute.manager [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2170.046420] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2170.046951] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa7c6c2b-8c10-4b1b-a1c0-c0b14cfb3c7d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.057360] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116ab2ab-36aa-43af-960c-b98c8c26eec6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.089608] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae85ff01-5625-453d-9dcf-c8417fbb6e0c could not be found. [ 2170.089812] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2170.089985] env[61964]: INFO nova.compute.manager [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2170.090337] env[61964]: DEBUG oslo.service.loopingcall [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2170.092604] env[61964]: DEBUG nova.compute.manager [-] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2170.092700] env[61964]: DEBUG nova.network.neutron [-] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2170.111927] env[61964]: DEBUG nova.network.neutron [-] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2170.118628] env[61964]: DEBUG nova.network.neutron [-] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2170.126661] env[61964]: INFO nova.compute.manager [-] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] Took 0.03 seconds to deallocate network for instance. 
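The recurring 'Acquiring lock …' / 'Lock … acquired … waited' / 'Lock … "released" … held' triples throughout this trace are emitted by oslo.concurrency's lock wrappers (the lockutils.py:404/409/423 and :312/:333 call sites cited above). A minimal usage sketch, assuming the real oslo.concurrency package; abort_claim is a hypothetical placeholder, not the resource tracker's actual method:

```python
from oslo_concurrency import lockutils


# The decorator serializes callers on an in-process lock named
# "compute_resources"; its wrapper is what logs the acquire / waited /
# held timings seen in the trace above when debug logging is enabled.
@lockutils.synchronized("compute_resources")
def abort_claim(instance_uuid):
    # Hypothetical body: release whatever resources were claimed
    # for this instance.
    print(f"aborting claim for {instance_uuid}")


abort_claim("ae85ff01-5625-453d-9dcf-c8417fbb6e0c")
```

The same primitive appears in two forms in this log: decorator-style locks such as "compute_resources" and the per-instance-UUID build/terminate locks, and an explicit acquire/release (context-manager style) around the "refresh_cache-<uuid>" lock held while the network info cache is rebuilt.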
[ 2170.210141] env[61964]: DEBUG oslo_concurrency.lockutils [None req-183f68a6-9830-4990-81de-d1a31533d293 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.333s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2170.210972] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 140.609s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2170.211200] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: ae85ff01-5625-453d-9dcf-c8417fbb6e0c] During sync_power_state the instance has a pending task (deleting). Skip. [ 2170.211381] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "ae85ff01-5625-453d-9dcf-c8417fbb6e0c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2170.213224] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1076ab-0a3d-41bc-84e5-80bf6886faaa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.220652] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e9982b-380c-4721-83d2-0b2b46038f1d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.250798] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377145ce-723e-42d2-889a-7dbf2029bb2d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.259053] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac11151a-c077-472a-a020-2ad25998b12d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.272870] env[61964]: DEBUG nova.compute.provider_tree [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2170.283603] env[61964]: DEBUG nova.scheduler.client.report [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2170.298959] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.363s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2170.299472] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2170.332501] env[61964]: DEBUG nova.compute.utils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2170.333895] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2170.334077] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2170.344341] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2170.412427] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2170.433965] env[61964]: DEBUG nova.policy [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8562b3b762ad4ee7b526aef9a7aa144f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6407a885683a469b9696b99a724ad93a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2170.443371] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2170.444121] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2170.444121] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2170.444121] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2170.444121] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2170.444335] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2170.444382] env[61964]: DEBUG nova.virt.hardware [None 
req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2170.444525] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2170.444706] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2170.445255] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2170.445255] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2170.445937] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f01d6f-4cdf-42e5-a2d2-5df1b06770ac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.454275] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e5dde9-d3bd-49cd-8cd9-165be8f068f0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.794375] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Successfully created port: e38270d1-c3b2-430d-9aaa-e33dd8aedc29 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2171.383658] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2171.383804] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2171.383924] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2171.408530] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 
032f2d6d-04c3-4210-a8d0-1c325a304a88] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.408708] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.408857] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.408989] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.409148] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.409452] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.409452] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.409529] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 63911858-5a79-4479-8c92-46afca980300] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.409758] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.409802] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2171.409907] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2171.410490] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2171.410686] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2171.481150] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Successfully updated port: e38270d1-c3b2-430d-9aaa-e33dd8aedc29 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2171.496533] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "refresh_cache-9ae01818-da08-4137-97c0-bc4c57759d46" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2171.496710] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired lock "refresh_cache-9ae01818-da08-4137-97c0-bc4c57759d46" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2171.496864] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2171.539014] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2171.973433] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Updating instance_info_cache with network_info: [{"id": "e38270d1-c3b2-430d-9aaa-e33dd8aedc29", "address": "fa:16:3e:22:0e:a8", "network": {"id": "2314ecbe-7750-4d93-9dfa-c2980f17cde9", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-790985375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6407a885683a469b9696b99a724ad93a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38270d1-c3", "ovs_interfaceid": "e38270d1-c3b2-430d-9aaa-e33dd8aedc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.990599] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Releasing lock "refresh_cache-9ae01818-da08-4137-97c0-bc4c57759d46" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2171.990599] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Instance network_info: |[{"id": "e38270d1-c3b2-430d-9aaa-e33dd8aedc29", "address": "fa:16:3e:22:0e:a8", "network": {"id": "2314ecbe-7750-4d93-9dfa-c2980f17cde9", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-790985375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6407a885683a469b9696b99a724ad93a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38270d1-c3", "ovs_interfaceid": "e38270d1-c3b2-430d-9aaa-e33dd8aedc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2171.991096] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:0e:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e38270d1-c3b2-430d-9aaa-e33dd8aedc29', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2171.998624] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating folder: Project (6407a885683a469b9696b99a724ad93a). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2171.999965] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad1bb8fb-5ee6-4b03-ac2d-d811b153e9ed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.009691] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Created folder: Project (6407a885683a469b9696b99a724ad93a) in parent group-v351942. [ 2172.010010] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating folder: Instances. Parent ref: group-v352017. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2172.010145] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26bbfb82-31f1-4230-839f-c08633922f21 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.019676] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Created folder: Instances in parent group-v352017. [ 2172.019947] env[61964]: DEBUG oslo.service.loopingcall [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2172.020154] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2172.020354] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21833c46-9863-45c6-8d8a-cb794fd94258 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.038724] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2172.038724] env[61964]: value = "task-1688700" [ 2172.038724] env[61964]: _type = "Task" [ 2172.038724] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.045972] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688700, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.549507] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688700, 'name': CreateVM_Task, 'duration_secs': 0.281607} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.549679] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2172.550386] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2172.550537] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2172.550855] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2172.551154] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab939751-504a-48c2-b4d5-38e838d7acc5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.555615] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2172.555615] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52905c20-dbcc-ec49-9ace-4faad0037f3b" [ 2172.555615] env[61964]: _type = "Task" [ 2172.555615] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.562907] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52905c20-dbcc-ec49-9ace-4faad0037f3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.067311] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2173.067641] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2173.067789] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2173.302884] env[61964]: DEBUG nova.compute.manager [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Received event network-vif-plugged-e38270d1-c3b2-430d-9aaa-e33dd8aedc29 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2173.303129] env[61964]: DEBUG oslo_concurrency.lockutils [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] Acquiring lock "9ae01818-da08-4137-97c0-bc4c57759d46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2173.303338] env[61964]: DEBUG oslo_concurrency.lockutils [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] Lock "9ae01818-da08-4137-97c0-bc4c57759d46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2173.303511] env[61964]: DEBUG oslo_concurrency.lockutils [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] Lock "9ae01818-da08-4137-97c0-bc4c57759d46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2173.303675] env[61964]: DEBUG nova.compute.manager [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] No waiting events found dispatching network-vif-plugged-e38270d1-c3b2-430d-9aaa-e33dd8aedc29 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2173.303831] env[61964]: WARNING nova.compute.manager [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Received unexpected event
network-vif-plugged-e38270d1-c3b2-430d-9aaa-e33dd8aedc29 for instance with vm_state building and task_state spawning. [ 2173.303986] env[61964]: DEBUG nova.compute.manager [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Received event network-changed-e38270d1-c3b2-430d-9aaa-e33dd8aedc29 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2173.304152] env[61964]: DEBUG nova.compute.manager [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Refreshing instance network info cache due to event network-changed-e38270d1-c3b2-430d-9aaa-e33dd8aedc29. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2173.304333] env[61964]: DEBUG oslo_concurrency.lockutils [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] Acquiring lock "refresh_cache-9ae01818-da08-4137-97c0-bc4c57759d46" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2173.304464] env[61964]: DEBUG oslo_concurrency.lockutils [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] Acquired lock "refresh_cache-9ae01818-da08-4137-97c0-bc4c57759d46" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2173.304613] env[61964]: DEBUG nova.network.neutron [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Refreshing network info cache for port e38270d1-c3b2-430d-9aaa-e33dd8aedc29 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2173.594820] env[61964]: DEBUG nova.network.neutron [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Updated VIF entry in instance network info cache for port e38270d1-c3b2-430d-9aaa-e33dd8aedc29. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2173.595117] env[61964]: DEBUG nova.network.neutron [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Updating instance_info_cache with network_info: [{"id": "e38270d1-c3b2-430d-9aaa-e33dd8aedc29", "address": "fa:16:3e:22:0e:a8", "network": {"id": "2314ecbe-7750-4d93-9dfa-c2980f17cde9", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-790985375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6407a885683a469b9696b99a724ad93a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape38270d1-c3", "ovs_interfaceid": "e38270d1-c3b2-430d-9aaa-e33dd8aedc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2173.604108] env[61964]: DEBUG oslo_concurrency.lockutils [req-735146fc-1d99-48de-aa86-cd269e3b273c req-819551a0-29ba-4a83-9915-1efb7640a76b service nova] Releasing lock "refresh_cache-9ae01818-da08-4137-97c0-bc4c57759d46" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2174.190892] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2179.383876] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2179.384187] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2180.384784] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2180.384784] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.378902] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.383520] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.384174] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.397323] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2185.397561] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2185.397766] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2185.397978] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2185.399168] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a61cae9-7d3d-4074-a6c1-dd09abe81fc7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.409107] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7292dc05-72e5-4870-a146-cd33b13a3fac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.429808] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6f4e8e-77a4-4d70-9270-44fb09b9bb41 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.436752] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b288c8-d20c-4495-927d-11cb72dfc522 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.467089] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181180MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2185.467273] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2185.467465] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2185.556902] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557078] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2b69def4-b892-4d76-bfd2-841014f75098 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557206] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557328] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557444] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557559] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557676] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557787] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.557896] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.558013] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2185.576632] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.589573] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.600543] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance d0cd87ae-53f6-4b03-9b49-b84b34cea243 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.615438] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance abb43b76-b3df-4d4a-b1f4-801306d0f01f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.625495] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.642346] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2f54f015-36dd-4390-a8a3-afa767581e44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.652392] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 79a8f962-d252-4895-9ac3-a2e214fc7d82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.664960] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 97aa9519-8986-49b9-a5d3-5a24968c709b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.677685] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 469107ae-93b5-467d-94d5-d9c78766a934 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2185.678814] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2185.678814] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2185.986261] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76290885-0c35-4be3-8a62-753f2d2edc0b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.993526] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f929fc8-973c-4f46-9914-cdf48916b41f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.023138] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cacabe0-ef2a-4c93-b34b-d6acc7aa0c9b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.030563] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe386294-90e4-464e-8671-a49b7e63e871 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.043496] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2186.052183] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2186.066347] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2186.066532] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.599s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2187.968399] env[61964]: DEBUG oslo_concurrency.lockutils [None 
req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "f5589fbe-df43-4407-b63a-5e4f96021b61" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2187.968691] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2214.464870] env[61964]: WARNING oslo_vmware.rw_handles [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2214.464870] env[61964]: ERROR oslo_vmware.rw_handles [ 2214.465435] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2214.466973] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2214.467248] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Copying
Virtual Disk [datastore1] vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/d24da0cf-6a43-43e3-a232-f9790c246635/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2214.467535] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38d15707-125e-4321-9cfa-c2b53dac4d5b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.475107] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Waiting for the task: (returnval){ [ 2214.475107] env[61964]: value = "task-1688701" [ 2214.475107] env[61964]: _type = "Task" [ 2214.475107] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2214.482730] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Task: {'id': task-1688701, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.985127] env[61964]: DEBUG oslo_vmware.exceptions [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2214.986045] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2214.986045] env[61964]: ERROR nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2214.986045] env[61964]: Faults: ['InvalidArgument'] [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Traceback (most recent call last): [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] yield resources [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self.driver.spawn(context, instance, image_meta, [ 
2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self._fetch_image_if_missing(context, vi) [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] image_cache(vi, tmp_image_ds_loc) [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] vm_util.copy_virtual_disk( [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] session._wait_for_task(vmdk_copy_task) [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] return self.wait_for_task(task_ref) [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] return evt.wait() [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] result = hub.switch() [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] return self.greenlet.switch() [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self.f(*self.args, **self.kw) [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2214.986045] env[61964]: ERROR 
nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] raise exceptions.translate_fault(task_info.error) [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Faults: ['InvalidArgument'] [ 2214.986045] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] [ 2214.987134] env[61964]: INFO nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Terminating instance [ 2214.987776] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2214.988044] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2214.988224] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17031fcc-5f1f-4674-91a3-eb15193fa2f6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.990645] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2214.990831] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2214.991607] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a7b8bd-3776-4b9c-9752-22ae2c9f46e5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.998453] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2214.998693] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76d2e337-7d47-4409-922e-7960aaba1b83 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.000930] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2215.001114] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2215.002089] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdc12c0d-7c9c-4e76-84fa-830df8080e48 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.006796] env[61964]: DEBUG oslo_vmware.api [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for the task: (returnval){ [ 2215.006796] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523dc914-eb4f-d187-7dc4-f2b7ecd84f8a" [ 2215.006796] env[61964]: _type = "Task" [ 2215.006796] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.020734] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2215.020966] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Creating directory with path [datastore1] vmware_temp/f2b1cd3a-9540-4c5d-b030-973899d37df4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2215.021204] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cfd7d24b-5677-4719-9abb-5f3eb930a4c3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.043631] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Created directory with path [datastore1] vmware_temp/f2b1cd3a-9540-4c5d-b030-973899d37df4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2215.043753] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Fetch image to [datastore1] vmware_temp/f2b1cd3a-9540-4c5d-b030-973899d37df4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2215.043928] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/f2b1cd3a-9540-4c5d-b030-973899d37df4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2215.044729] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb9bbb7-2468-47c7-ba5c-7ff80fe10dc3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.052044] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5947c9e7-2921-44cc-92eb-00bf19f71660 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.060972] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82f83df-aae3-4594-a6f1-0c07102a222c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.092845] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-381fcebf-45c3-4aa4-a83e-c9ed1e0db7d3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.098913] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1653c5d6-81eb-4121-ada5-4471878febcf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.109714] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2215.109965] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2215.110231] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Deleting the datastore file [datastore1] 032f2d6d-04c3-4210-a8d0-1c325a304a88 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2215.110495] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a529a1f6-fffc-4192-8b23-392bc1afe740 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.117221] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Waiting for the task: (returnval){ [ 2215.117221] env[61964]: value = "task-1688703" [ 2215.117221] env[61964]: _type = "Task" [ 2215.117221] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.124913] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Task: {'id': task-1688703, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.188778] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2215.333727] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2215.335395] env[61964]: ERROR nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = getattr(controller, method)(*args, **kwargs) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._get(image_id) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] resp, body = self.http_client.get(url, headers=header) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.request(url, 'GET', 
**kwargs) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._handle_response(resp) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exc.from_response(resp, resp.content) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During handling of the above exception, another exception occurred: [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] yield resources [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self.driver.spawn(context, instance, image_meta, [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._fetch_image_if_missing(context, vi) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image_fetch(context, vi, tmp_image_ds_loc) [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 
2b69def4-b892-4d76-bfd2-841014f75098] images.fetch_image( [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2215.335395] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] metadata = IMAGE_API.get(context, image_ref) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return session.show(context, image_id, [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] _reraise_translated_image_exception(image_id) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise new_exc.with_traceback(exc_trace) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = getattr(controller, method)(*args, **kwargs) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._get(image_id) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] resp, body = self.http_client.get(url, headers=header) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.request(url, 'GET', **kwargs) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._handle_response(resp) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exc.from_response(resp, resp.content) [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2215.336309] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2215.336309] env[61964]: INFO nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Terminating instance [ 2215.337358] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2215.337570] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2215.338221] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2215.338408] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2215.338635] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a62141eb-9613-458d-9a4e-699c8f0955ea {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.341326] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42db0d76-a2a1-4051-8661-edceeada0869 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.348871] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2215.349095] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73783cbf-f349-4741-bd4a-101468a97cbe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.351338] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2215.351513] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2215.352524] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28539db3-a291-4b60-bd52-20f55525b396 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.356965] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Waiting for the task: (returnval){ [ 2215.356965] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c45fa8-7514-82d4-d19e-6ca390f49060" [ 2215.356965] env[61964]: _type = "Task" [ 2215.356965] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.364184] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c45fa8-7514-82d4-d19e-6ca390f49060, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.418971] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2215.419198] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2215.419378] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Deleting the datastore file [datastore1] 2b69def4-b892-4d76-bfd2-841014f75098 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2215.419709] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5c0238f-cac1-4584-a9b6-e667db20ded0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.425644] env[61964]: DEBUG oslo_vmware.api [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for the task: (returnval){ [ 2215.425644] env[61964]: value = "task-1688705" [ 2215.425644] env[61964]: _type = "Task" [ 2215.425644] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.432891] env[61964]: DEBUG oslo_vmware.api [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688705, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.627790] env[61964]: DEBUG oslo_vmware.api [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Task: {'id': task-1688703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069063} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.628150] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2215.628194] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2215.628347] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2215.628516] env[61964]: INFO nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Took 0.64 seconds to destroy the instance on the hypervisor. [ 2215.630624] env[61964]: DEBUG nova.compute.claims [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2215.630799] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2215.631015] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2215.867052] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2215.867333] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Creating directory with path [datastore1] vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2215.867542] 
env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbee3d82-5eeb-4e42-95fe-65dd76877ed4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.878685] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Created directory with path [datastore1] vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2215.878895] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Fetch image to [datastore1] vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2215.879075] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2215.879830] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f5171a-f149-4aa3-b8bc-8084b6fede65 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.888595] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1850bf3-f333-4b9c-930e-e081c2975823 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.898820] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfe760d-4cad-4c11-bf00-e775e8d87c4e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.901946] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac98a86-0425-480b-a578-4dfea7c67a5e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.932878] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab42edf-81c6-4e11-a5a1-7c986bbd0bb6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.939088] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221e2465-057d-49f4-be5f-e18df098c2b3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.971844] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7146d0c-8f1f-47a5-9bd7-f0b778369ec3 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.974269] env[61964]: DEBUG oslo_vmware.api [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Task: {'id': task-1688705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067429} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.974902] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2215.975132] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2215.975309] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2215.975474] env[61964]: INFO nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Took 0.64 seconds to destroy the instance on the hypervisor. 
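
[editor's note] The task records above (SearchDatastore_Task, DeleteDatastoreFile_Task "completed successfully" with a duration) and the wait_for_task/_poll_task frames in the VimFaultException traceback that follows all come from the same polling loop in oslo_vmware/api.py. The snippet below is only a minimal, self-contained sketch of that pattern for orientation; get_task_info and TaskFaultError are hypothetical stand-ins, not the real oslo.vmware API.

# Sketch of the task-polling pattern visible in the wait_for_task/_poll_task
# frames in this log. Assumes get_task_info() returns an object with
# .state ('running' | 'success' | 'error'), .progress and .error attributes.
import time

class TaskFaultError(Exception):
    """Stand-in for a translated task fault (cf. VimFaultException below)."""

def wait_for_task(get_task_info, interval=0.5, timeout=60.0):
    """Poll a task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # This is the point where the log's _poll_task raises the
            # translated vCenter fault (e.g. InvalidArgument: fileType).
            raise TaskFaultError(info.error)
        # Still running: report progress (cf. "progress is 0%" above) and retry.
        print(f"task progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

[end editor's note]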
[ 2215.979040] env[61964]: DEBUG nova.compute.claims [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2215.979204] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2215.979417] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1cdefd55-1c6d-40cc-8602-7d4417f6db67 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.984112] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f98b6d8-eed9-408a-b169-47a47b9ffe46 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.997336] env[61964]: DEBUG nova.compute.provider_tree [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2216.003028] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2216.008781] env[61964]: DEBUG nova.scheduler.client.report [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2216.048376] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.417s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.048904] env[61964]: ERROR nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 
tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2216.048904] env[61964]: Faults: ['InvalidArgument'] [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Traceback (most recent call last): [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self.driver.spawn(context, instance, image_meta, [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self._fetch_image_if_missing(context, vi) [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] image_cache(vi, tmp_image_ds_loc) [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] vm_util.copy_virtual_disk( [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] session._wait_for_task(vmdk_copy_task) [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] return self.wait_for_task(task_ref) [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] return evt.wait() [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] result = hub.switch() [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] return self.greenlet.switch() [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] self.f(*self.args, **self.kw) [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] raise exceptions.translate_fault(task_info.error) [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Faults: ['InvalidArgument'] [ 2216.048904] env[61964]: ERROR nova.compute.manager [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] [ 2216.049957] env[61964]: DEBUG nova.compute.utils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2216.051169] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.072s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.054311] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Build of instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 was re-scheduled: A specified parameter was not correct: fileType [ 2216.054311] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2216.056466] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2216.056466] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2216.056466] env[61964]: DEBUG nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2216.056466] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2216.057616] env[61964]: DEBUG oslo_vmware.rw_handles [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2216.122450] env[61964]: DEBUG oslo_vmware.rw_handles [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2216.122450] env[61964]: DEBUG oslo_vmware.rw_handles [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2216.358304] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a50a2a1-0bca-4a3c-a9f8-45e8fbb13faa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.366494] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20535a0-2895-440e-a8c4-71a719bc5ed7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.396426] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1280165-d6e2-4b25-bec7-b1cc3df53da3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.403386] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cbc843-49c3-412d-8fcd-1e8aa5f2b053 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.416179] env[61964]: DEBUG nova.compute.provider_tree [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2216.427495] env[61964]: DEBUG nova.scheduler.client.report [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2216.450191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.399s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.450924] env[61964]: ERROR nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = getattr(controller, method)(*args, **kwargs) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._get(image_id) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] resp, body = self.http_client.get(url, headers=header) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.request(url, 'GET', **kwargs) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._handle_response(resp) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exc.from_response(resp, resp.content) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During handling of the above exception, another exception occurred: [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self.driver.spawn(context, instance, image_meta, [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._fetch_image_if_missing(context, vi) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image_fetch(context, vi, tmp_image_ds_loc) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] images.fetch_image( [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] metadata = IMAGE_API.get(context, image_ref) [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2216.450924] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return session.show(context, image_id, [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] _reraise_translated_image_exception(image_id) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise new_exc.with_traceback(exc_trace) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 
2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = getattr(controller, method)(*args, **kwargs) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._get(image_id) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] resp, body = self.http_client.get(url, headers=header) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.request(url, 'GET', **kwargs) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._handle_response(resp) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exc.from_response(resp, resp.content) [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2216.451908] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.451908] env[61964]: DEBUG nova.compute.utils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
{{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2216.453107] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Build of instance 2b69def4-b892-4d76-bfd2-841014f75098 was re-scheduled: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2216.455580] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2216.455580] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2216.455580] env[61964]: DEBUG nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2216.455580] env[61964]: DEBUG nova.network.neutron [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2216.476770] env[61964]: DEBUG nova.network.neutron [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.491145] env[61964]: INFO nova.compute.manager [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Took 0.44 seconds to deallocate network for instance. 
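
[editor's note] The ImageNotAuthorized traceback above ends with glanceclient.exc.HTTPUnauthorized (HTTP 401) being re-raised through _reraise_translated_image_exception in nova/image/glance.py as nova.exception.ImageNotAuthorized, preserving the original traceback. The snippet below is a minimal sketch of that translation pattern only; the exception classes and the client_get callable are simplified stand-ins, not the real nova or glanceclient code.

# Sketch of the exception translation shown in the traceback above:
# a Glance-side HTTP 401 is surfaced to Nova as ImageNotAuthorized while
# keeping the original traceback attached.
import sys

class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized (HTTP 401)."""

class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized."""
    def __init__(self, image_id):
        super().__init__(f"Not authorized for image {image_id}.")

def show(image_id, client_get):
    """Fetch image metadata, translating auth failures as the log shows."""
    try:
        return client_get(image_id)
    except HTTPUnauthorized:
        # Mirror _reraise_translated_image_exception(): re-raise a Nova-level
        # exception type carrying the original traceback.
        exc_trace = sys.exc_info()[2]
        raise ImageNotAuthorized(image_id).with_traceback(exc_trace)

[end editor's note]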
[ 2216.578231] env[61964]: DEBUG neutronclient.v2_0.client [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61964) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2216.579955] env[61964]: ERROR nova.compute.manager [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = getattr(controller, method)(*args, **kwargs) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._get(image_id) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] resp, body = self.http_client.get(url, headers=header) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.request(url, 'GET', **kwargs) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._handle_response(resp) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in 
_handle_response [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exc.from_response(resp, resp.content) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During handling of the above exception, another exception occurred: [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self.driver.spawn(context, instance, image_meta, [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._fetch_image_if_missing(context, vi) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image_fetch(context, vi, tmp_image_ds_loc) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] images.fetch_image( [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] metadata = IMAGE_API.get(context, image_ref) [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2216.579955] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return session.show(context, image_id, [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2216.580840] 
env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] _reraise_translated_image_exception(image_id) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise new_exc.with_traceback(exc_trace) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = getattr(controller, method)(*args, **kwargs) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._get(image_id) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] resp, body = self.http_client.get(url, headers=header) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.request(url, 'GET', **kwargs) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self._handle_response(resp) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exc.from_response(resp, resp.content) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During handling of the above exception, another exception occurred: [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._build_and_run_instance(context, instance, image, [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exception.RescheduledException( [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] nova.exception.RescheduledException: Build of instance 2b69def4-b892-4d76-bfd2-841014f75098 was re-scheduled: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During handling of the above exception, another exception occurred: [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] exception_handler_v20(status_code, error_body) [ 2216.580840] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise client_exc(message=error_message, [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Neutron server returns request_ids: ['req-5a52093a-d3e6-401a-9d67-fb5051e9a3a8'] [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 
2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During handling of the above exception, another exception occurred: [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._deallocate_network(context, instance, requested_networks) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self.network_api.deallocate_for_instance( [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] data = neutron.list_ports(**search_opts) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.list('ports', self.ports_path, retrieve_all, [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] for r in self._pagination(collection, path, **params): [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] res = self.get(path, params=params) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 
2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.retry_request("GET", action, body=body, [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.do_request(method, action, body=body, [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._handle_fault_response(status_code, replybody, resp) [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exception.Unauthorized() [ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] nova.exception.Unauthorized: Not authorized. 
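The chained traceback above ends in two client-level auth failures being translated into Nova-level exceptions: glanceclient's HTTPUnauthorized becomes nova.exception.ImageNotAuthorized (via _reraise_translated_image_exception), and neutronclient's Unauthorized becomes nova.exception.Unauthorized inside the wrapper at nova/network/neutron.py:196. A minimal sketch of that translation pattern follows; the stand-in exception classes and the is_admin_client flag are illustrative assumptions, not the upstream implementation.

import functools

from neutronclient.common import exceptions as neutron_client_exc


class Unauthorized(Exception):
    """Illustrative stand-in for nova.exception.Unauthorized."""


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Illustrative stand-in for the Nova exception of the same name."""


def translate_neutron_auth_errors(is_admin_client=False):
    """Decorator: re-raise neutronclient 401s as Nova-level exceptions."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_client_exc.Unauthorized:
                if is_admin_client:
                    # A 401 on the admin client points at bad [neutron]
                    # credentials in nova.conf rather than a user error.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise Unauthorized()
        return wrapper
    return decorator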
[ 2216.581820] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.599353] env[61964]: INFO nova.scheduler.client.report [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Deleted allocations for instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 [ 2216.622574] env[61964]: DEBUG oslo_concurrency.lockutils [None req-79a1d731-b43a-4d1e-862c-fe3a5acf65c2 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 680.469s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.624025] env[61964]: DEBUG oslo_concurrency.lockutils [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 484.382s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.624025] env[61964]: DEBUG oslo_concurrency.lockutils [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Acquiring lock "032f2d6d-04c3-4210-a8d0-1c325a304a88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2216.624240] env[61964]: DEBUG oslo_concurrency.lockutils [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.624414] env[61964]: DEBUG oslo_concurrency.lockutils [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.627759] env[61964]: INFO nova.compute.manager [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Terminating instance [ 2216.629705] env[61964]: DEBUG nova.compute.manager [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2216.629962] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2216.630266] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-748eddc1-521e-466b-9d7c-8e9a39807909 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.639880] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d217075-3779-4f27-bb0e-9bc38af596b6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.651371] env[61964]: INFO nova.scheduler.client.report [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Deleted allocations for instance 2b69def4-b892-4d76-bfd2-841014f75098 [ 2216.656258] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2216.676221] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 032f2d6d-04c3-4210-a8d0-1c325a304a88 could not be found. [ 2216.676423] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2216.676651] env[61964]: INFO nova.compute.manager [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2216.676839] env[61964]: DEBUG oslo.service.loopingcall [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.677222] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3525f58d-9f83-43e7-b23a-e83f858afd84 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "2b69def4-b892-4d76-bfd2-841014f75098" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.625s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.677402] env[61964]: DEBUG nova.compute.manager [-] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2216.677498] env[61964]: DEBUG nova.network.neutron [-] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2216.680153] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "2b69def4-b892-4d76-bfd2-841014f75098" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.897s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.680356] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Acquiring lock "2b69def4-b892-4d76-bfd2-841014f75098-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2216.680549] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "2b69def4-b892-4d76-bfd2-841014f75098-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.680709] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "2b69def4-b892-4d76-bfd2-841014f75098-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.682766] env[61964]: INFO nova.compute.manager [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Terminating instance [ 2216.684738] env[61964]: DEBUG nova.compute.manager [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2216.684914] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2216.685156] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32513d29-0568-4216-885f-09a8be33975a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.688106] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2216.696368] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481fae41-52fd-4b86-badd-12c3b7042e99 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.717235] env[61964]: DEBUG nova.network.neutron [-] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.729135] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b69def4-b892-4d76-bfd2-841014f75098 could not be found. [ 2216.729366] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2216.729544] env[61964]: INFO nova.compute.manager [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2216.729784] env[61964]: DEBUG oslo.service.loopingcall [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.730345] env[61964]: DEBUG nova.compute.manager [-] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2216.730483] env[61964]: DEBUG nova.network.neutron [-] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2216.736040] env[61964]: INFO nova.compute.manager [-] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] Took 0.06 seconds to deallocate network for instance. [ 2216.742609] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2216.742843] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.744253] env[61964]: INFO nova.compute.claims [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2216.765292] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2216.831864] env[61964]: DEBUG oslo_concurrency.lockutils [None req-acef4973-e464-4ead-9ece-0f227cb3f463 tempest-FloatingIPsAssociationTestJSON-1105664005 tempest-FloatingIPsAssociationTestJSON-1105664005-project-member] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.208s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.832856] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 187.230s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.832856] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 032f2d6d-04c3-4210-a8d0-1c325a304a88] During sync_power_state the instance has a pending task (deleting). Skip. 
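The "Waiting for function ..._deallocate_network_with_retries to return" records above come from a retrying looping call wrapped around network deallocation; the later traceback names oslo_service.loopingcall.RetryDecorator explicitly. A rough sketch of that usage pattern is below, assuming illustrative retry counts, an illustrative retried exception type, and argument names for deallocate_for_instance that are not taken from the trace.

from oslo_service import loopingcall


def deallocate_network_with_retries(network_api, context, instance,
                                    requested_networks):
    """Run network deallocation under a retrying looping call."""

    @loopingcall.RetryDecorator(max_retry_count=3,
                                inc_sleep_time=10,
                                max_sleep_time=60,
                                exceptions=(ConnectionError,))
    def _deallocate():
        # Each attempt re-runs the full deallocation.  Exceptions not listed
        # in `exceptions` (such as the translated 401 above) are not retried;
        # they fail the looping call, which is what the ERROR records show.
        network_api.deallocate_for_instance(
            context, instance, requested_networks=requested_networks)

    # Calling the decorated function starts the dynamic looping call and
    # blocks until it either succeeds or gives up.
    _deallocate()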
[ 2216.833081] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "032f2d6d-04c3-4210-a8d0-1c325a304a88" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.844551] env[61964]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61964) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2216.844817] env[61964]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-0c4699d7-c3fa-45c4-934b-381b6d007da1'] [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in 
_deallocate_network_with_retries [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2216.845652] env[61964]: ERROR oslo.service.loopingcall [ 2216.847135] env[61964]: ERROR nova.compute.manager [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2216.877040] env[61964]: ERROR nova.compute.manager [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] exception_handler_v20(status_code, error_body) [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise client_exc(message=error_message, [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Neutron server returns request_ids: ['req-0c4699d7-c3fa-45c4-934b-381b6d007da1'] [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During handling of the above exception, another exception occurred: [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Traceback (most recent call last): [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2216.877040] env[61964]: 
ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._delete_instance(context, instance, bdms) [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._shutdown_instance(context, instance, bdms) [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._try_deallocate_network(context, instance, requested_networks) [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] with excutils.save_and_reraise_exception(): [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self.force_reraise() [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise self.value [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] _deallocate_network_with_retries() [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return evt.wait() [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = hub.switch() [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.greenlet.switch() [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = func(*self.args, **self.kw) [ 2216.877040] env[61964]: ERROR nova.compute.manager [instance: 
2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] result = f(*args, **kwargs) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._deallocate_network( [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self.network_api.deallocate_for_instance( [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] data = neutron.list_ports(**search_opts) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.list('ports', self.ports_path, retrieve_all, [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] for r in self._pagination(collection, path, **params): [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] res = self.get(path, params=params) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 
2b69def4-b892-4d76-bfd2-841014f75098] return self.retry_request("GET", action, body=body, [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] return self.do_request(method, action, body=body, [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] ret = obj(*args, **kwargs) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] self._handle_fault_response(status_code, replybody, resp) [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2216.878089] env[61964]: ERROR nova.compute.manager [instance: 2b69def4-b892-4d76-bfd2-841014f75098] [ 2216.904527] env[61964]: DEBUG oslo_concurrency.lockutils [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Lock "2b69def4-b892-4d76-bfd2-841014f75098" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.224s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.906032] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "2b69def4-b892-4d76-bfd2-841014f75098" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 187.303s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2216.906244] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] During sync_power_state the instance has a pending task (deleting). Skip. 
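The terminate-path traceback above passes repeatedly through oslo_utils.excutils.save_and_reraise_exception(), which is what lets Nova run cleanup (reverting the task state, as the record below reports) while still surfacing the original failure to oslo_messaging. A minimal sketch of that pattern; the helper name and the two callables are illustrative.

from oslo_utils import excutils


def shutdown_with_state_revert(shutdown_instance, revert_task_state):
    """Run a shutdown step; on failure, revert state and re-raise."""
    try:
        shutdown_instance()
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup runs while the original exception is saved; when the
            # context manager exits it re-raises that exception, which the
            # RPC layer then logs as "Exception during message handling".
            revert_task_state()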
[ 2216.906426] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "2b69def4-b892-4d76-bfd2-841014f75098" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.959795] env[61964]: INFO nova.compute.manager [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] [instance: 2b69def4-b892-4d76-bfd2-841014f75098] Successfully reverted task state from None on failure for instance. [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server [None req-e3fd3e17-07fb-4764-8f6c-148fda7ce155 tempest-DeleteServersAdminTestJSON-201763809 tempest-DeleteServersAdminTestJSON-201763809-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-0c4699d7-c3fa-45c4-934b-381b6d007da1'] [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server 
File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 2216.965858] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2216.967300] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2216.968673] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2216.968673] 
env[61964]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2216.968673] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2216.968673] env[61964]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2216.968673] env[61964]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2216.968673] env[61964]: ERROR oslo_messaging.rpc.server [ 2217.029414] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be37d698-ba28-41fb-9e4d-12f0690c34a8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.037514] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cd1e1b-97bc-4607-8565-0092c78edf4f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.066550] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e7e6dd-2971-4248-acf8-79845925f15b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.073732] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b49696b-ceca-4325-9eff-bd0e7ac12f4a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.086476] env[61964]: DEBUG nova.compute.provider_tree [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2217.095447] env[61964]: DEBUG nova.scheduler.client.report [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2217.111123] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.368s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2217.111647] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2217.114006] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.349s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2217.117023] env[61964]: INFO nova.compute.claims [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2217.144356] env[61964]: DEBUG nova.compute.utils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2217.145535] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2217.145718] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2217.153558] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2217.220297] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2217.225460] env[61964]: DEBUG nova.policy [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8562b3b762ad4ee7b526aef9a7aa144f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6407a885683a469b9696b99a724ad93a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2217.248657] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2217.248897] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2217.249067] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2217.249251] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2217.249396] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2217.249539] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2217.249740] env[61964]: DEBUG nova.virt.hardware [None 
req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2217.249898] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2217.250077] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2217.250241] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2217.250412] env[61964]: DEBUG nova.virt.hardware [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2217.251271] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd939f72-2d13-4e04-8067-c8dfb33ec5b1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.261646] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf60f79-3720-49b9-8c7e-275babea6604 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.397122] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679d48c1-63c2-42f7-bfb4-b0fe8585f9f3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.404746] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326917e3-2d97-40b6-81b2-c9c8043b8029 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.438810] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2972b0-4226-4b30-aa30-acae308d8a37 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.446599] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08cc79b-9a1e-4504-9bea-e733afb7d98f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.462963] env[61964]: DEBUG nova.compute.provider_tree [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not 
changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2217.473723] env[61964]: DEBUG nova.scheduler.client.report [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2217.497871] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.384s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2217.498890] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2217.542151] env[61964]: DEBUG nova.compute.utils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2217.543447] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2217.543618] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2217.558717] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2217.607935] env[61964]: DEBUG nova.policy [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eba0b13c97dc427ab0da4fdbd71ae85d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c129e8a559b4264927ff0d2510aa439', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2217.630412] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2217.665138] env[61964]: DEBUG 
nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2217.665138] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2217.665731] env[61964]: DEBUG nova.virt.hardware [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2217.666946] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15086b5e-6b66-4800-a35a-5c1df6de4656 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.675248] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba8fbbf-cdd3-48a6-a6e3-6f229ca38165 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.784152] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Successfully created port: 29af5994-d39f-4e1f-a787-5b27be8d7679 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2217.944548] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Successfully created port: cbc594fd-9253-41f2-ac63-faae62db282f {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2218.511691] env[61964]: DEBUG nova.compute.manager [req-70207b7d-5511-42c4-a619-eef274443ef1 
req-93c5708a-5c47-4aeb-b956-5c5ccc9237be service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Received event network-vif-plugged-29af5994-d39f-4e1f-a787-5b27be8d7679 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2218.512007] env[61964]: DEBUG oslo_concurrency.lockutils [req-70207b7d-5511-42c4-a619-eef274443ef1 req-93c5708a-5c47-4aeb-b956-5c5ccc9237be service nova] Acquiring lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2218.512243] env[61964]: DEBUG oslo_concurrency.lockutils [req-70207b7d-5511-42c4-a619-eef274443ef1 req-93c5708a-5c47-4aeb-b956-5c5ccc9237be service nova] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2218.512414] env[61964]: DEBUG oslo_concurrency.lockutils [req-70207b7d-5511-42c4-a619-eef274443ef1 req-93c5708a-5c47-4aeb-b956-5c5ccc9237be service nova] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2218.512609] env[61964]: DEBUG nova.compute.manager [req-70207b7d-5511-42c4-a619-eef274443ef1 req-93c5708a-5c47-4aeb-b956-5c5ccc9237be service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] No waiting events found dispatching network-vif-plugged-29af5994-d39f-4e1f-a787-5b27be8d7679 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2218.512797] env[61964]: WARNING nova.compute.manager [req-70207b7d-5511-42c4-a619-eef274443ef1 req-93c5708a-5c47-4aeb-b956-5c5ccc9237be service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Received unexpected event network-vif-plugged-29af5994-d39f-4e1f-a787-5b27be8d7679 for instance with vm_state building and task_state spawning. 
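The preceding records show Nova's external instance event handling for port 29af5994-d39f-4e1f-a787-5b27be8d7679: the compute manager receives network-vif-plugged from Neutron, takes the per-instance "-events" lock, tries to pop a registered waiter for that event, finds none ("No waiting events found dispatching"), and therefore logs the event as unexpected while the instance is still building and spawning. The sketch below is a minimal, hypothetical illustration of that pop-a-waiter-or-warn pattern; the names InstanceEvents and pop_instance_event mirror the log, but the implementation is simplified and is not Nova's actual code.

import threading
from collections import defaultdict


class InstanceEvents:
    """Illustrative stand-in for the event registry referenced in the log."""

    def __init__(self):
        self._lock = threading.Lock()      # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

    def prepare_for_event(self, instance_uuid, event_name):
        # Called by the code path that will later block on the event.
        with self._lock:
            event = threading.Event()
            self._waiters[instance_uuid][event_name] = event
            return event

    def pop_instance_event(self, instance_uuid, event_name):
        # Called when an external event such as
        # "network-vif-plugged-<port id>" arrives from Neutron.
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)


def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING above: nothing is waiting yet, so the
        # event is reported as unexpected and effectively dropped.
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
    else:
        waiter.set()

During an instance build this warning is usually benign: it indicates the plug notification arrived before anything had registered a waiter for it, which is consistent with the vm_state building / task_state spawning noted in the log line.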
[ 2218.591561] env[61964]: DEBUG nova.compute.manager [req-fde676e6-c89d-4896-8523-fdd9712dea84 req-c39267cb-db7b-4c2c-8cf2-25ca94326490 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Received event network-vif-plugged-cbc594fd-9253-41f2-ac63-faae62db282f {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2218.591809] env[61964]: DEBUG oslo_concurrency.lockutils [req-fde676e6-c89d-4896-8523-fdd9712dea84 req-c39267cb-db7b-4c2c-8cf2-25ca94326490 service nova] Acquiring lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2218.592030] env[61964]: DEBUG oslo_concurrency.lockutils [req-fde676e6-c89d-4896-8523-fdd9712dea84 req-c39267cb-db7b-4c2c-8cf2-25ca94326490 service nova] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2218.592293] env[61964]: DEBUG oslo_concurrency.lockutils [req-fde676e6-c89d-4896-8523-fdd9712dea84 req-c39267cb-db7b-4c2c-8cf2-25ca94326490 service nova] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2218.592358] env[61964]: DEBUG nova.compute.manager [req-fde676e6-c89d-4896-8523-fdd9712dea84 req-c39267cb-db7b-4c2c-8cf2-25ca94326490 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] No waiting events found dispatching network-vif-plugged-cbc594fd-9253-41f2-ac63-faae62db282f {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2218.592517] env[61964]: WARNING nova.compute.manager [req-fde676e6-c89d-4896-8523-fdd9712dea84 req-c39267cb-db7b-4c2c-8cf2-25ca94326490 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Received unexpected event network-vif-plugged-cbc594fd-9253-41f2-ac63-faae62db282f for instance with vm_state building and task_state spawning. 
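The lock tracing that recurs throughout this log ("Acquiring lock ...", 'acquired by "..." :: waited', '"released" by "..." :: held') comes from oslo.concurrency's lockutils. The decorator form (the "inner" frames at lockutils.py:404/409/423, used for the "compute_resources" and per-instance "-events" locks) reports how long the caller waited for and held a named semaphore, while the lock() context manager (lockutils.py:312/315/333, used for the "refresh_cache-..." and datastore image-cache locks) logs plain acquire/release. The sketch below shows both forms; the lock names are taken from the log, but the functions are illustrative stand-ins rather than Nova's real methods.

from oslo_concurrency import lockutils


# Decorator form: emits the 'acquired by "..." :: waited Xs' and
# '"released" by "..." :: held Xs' DEBUG lines when debug logging is enabled.
@lockutils.synchronized('compute_resources')
def instance_claim():
    # Critical section, e.g. the resource-tracker bookkeeping done by
    # ResourceTracker.instance_claim in the log above.
    pass


# Context-manager form: emits the plain Acquiring/Acquired/Releasing lines
# used for the "refresh_cache-<instance uuid>" locks above.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache while holding the lock


instance_claim()
refresh_network_cache('bae8f345-41e7-4adb-a44c-d91347fb4c7a')

Both locks are in-process by default; passing external=True (with a lock_path) switches them to file-based locks shared across processes.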
[ 2218.612075] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Successfully updated port: 29af5994-d39f-4e1f-a787-5b27be8d7679 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2218.623202] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "refresh_cache-bae8f345-41e7-4adb-a44c-d91347fb4c7a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2218.623346] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired lock "refresh_cache-bae8f345-41e7-4adb-a44c-d91347fb4c7a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2218.623498] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2218.673514] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Successfully updated port: cbc594fd-9253-41f2-ac63-faae62db282f {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2218.681447] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2218.688975] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "refresh_cache-4c21315f-18a1-4cc4-b4b7-cfb07b06c379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2218.689131] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired lock "refresh_cache-4c21315f-18a1-4cc4-b4b7-cfb07b06c379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2218.689274] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2218.739586] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2218.898706] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Updating instance_info_cache with network_info: [{"id": "cbc594fd-9253-41f2-ac63-faae62db282f", "address": "fa:16:3e:71:7d:32", "network": {"id": "3852b29f-f2c2-4604-b229-1d5952e819eb", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1568272893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c129e8a559b4264927ff0d2510aa439", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbc594fd-92", "ovs_interfaceid": "cbc594fd-9253-41f2-ac63-faae62db282f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.912184] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Releasing lock "refresh_cache-4c21315f-18a1-4cc4-b4b7-cfb07b06c379" {{(pid=61964) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2218.912468] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Instance network_info: |[{"id": "cbc594fd-9253-41f2-ac63-faae62db282f", "address": "fa:16:3e:71:7d:32", "network": {"id": "3852b29f-f2c2-4604-b229-1d5952e819eb", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1568272893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c129e8a559b4264927ff0d2510aa439", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbc594fd-92", "ovs_interfaceid": "cbc594fd-9253-41f2-ac63-faae62db282f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2218.912875] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:7d:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a92a4ffe-7939-4697-bf98-5b22e2c7feda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbc594fd-9253-41f2-ac63-faae62db282f', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2218.920379] env[61964]: DEBUG oslo.service.loopingcall [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.920797] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2218.921037] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e22631cb-b9f0-44af-a3e3-51de5b3dc6c4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.935934] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Updating instance_info_cache with network_info: [{"id": "29af5994-d39f-4e1f-a787-5b27be8d7679", "address": "fa:16:3e:8b:f6:c7", "network": {"id": "2314ecbe-7750-4d93-9dfa-c2980f17cde9", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-790985375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6407a885683a469b9696b99a724ad93a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29af5994-d3", "ovs_interfaceid": "29af5994-d39f-4e1f-a787-5b27be8d7679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.943017] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2218.943017] env[61964]: value = "task-1688706" [ 2218.943017] env[61964]: _type = "Task" [ 2218.943017] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.946446] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Releasing lock "refresh_cache-bae8f345-41e7-4adb-a44c-d91347fb4c7a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2218.946701] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Instance network_info: |[{"id": "29af5994-d39f-4e1f-a787-5b27be8d7679", "address": "fa:16:3e:8b:f6:c7", "network": {"id": "2314ecbe-7750-4d93-9dfa-c2980f17cde9", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-790985375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6407a885683a469b9696b99a724ad93a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29af5994-d3", "ovs_interfaceid": "29af5994-d39f-4e1f-a787-5b27be8d7679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2218.947403] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:f6:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29af5994-d39f-4e1f-a787-5b27be8d7679', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2218.954750] env[61964]: DEBUG oslo.service.loopingcall [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.959294] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2218.959487] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688706, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.959669] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cef412ee-4ca7-4238-94a7-331c89737a2d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.977534] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2218.977534] env[61964]: value = "task-1688707" [ 2218.977534] env[61964]: _type = "Task" [ 2218.977534] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.984940] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688707, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.452787] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688706, 'name': CreateVM_Task, 'duration_secs': 0.383399} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.452966] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2219.453668] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2219.453835] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2219.454189] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2219.454440] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cc8f9be-cc99-4cc4-8bcb-5e6e19b57fa4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.458730] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 2219.458730] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b4a67a-8ef4-8b3a-5d34-699159fc1112" [ 2219.458730] env[61964]: _type = "Task" [ 2219.458730] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.466237] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b4a67a-8ef4-8b3a-5d34-699159fc1112, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.486048] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688707, 'name': CreateVM_Task, 'duration_secs': 0.371124} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.486213] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2219.486804] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2219.969334] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2219.969694] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2219.969967] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2219.970226] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2219.970570] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2219.970835] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af0e90cb-20de-42f1-8552-2017a8e66761 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.975460] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2219.975460] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b71802-a649-3fe0-b5dc-2aae027ad4cd" [ 2219.975460] env[61964]: _type = "Task" [ 2219.975460] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.983090] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b71802-a649-3fe0-b5dc-2aae027ad4cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.485937] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2220.486201] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2220.486411] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2220.542790] env[61964]: DEBUG nova.compute.manager [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Received event network-changed-29af5994-d39f-4e1f-a787-5b27be8d7679 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2220.543014] env[61964]: DEBUG nova.compute.manager [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Refreshing instance network info cache due to event network-changed-29af5994-d39f-4e1f-a787-5b27be8d7679. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2220.543237] env[61964]: DEBUG oslo_concurrency.lockutils [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] Acquiring lock "refresh_cache-bae8f345-41e7-4adb-a44c-d91347fb4c7a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2220.543380] env[61964]: DEBUG oslo_concurrency.lockutils [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] Acquired lock "refresh_cache-bae8f345-41e7-4adb-a44c-d91347fb4c7a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2220.543539] env[61964]: DEBUG nova.network.neutron [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Refreshing network info cache for port 29af5994-d39f-4e1f-a787-5b27be8d7679 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2220.622329] env[61964]: DEBUG nova.compute.manager [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Received event network-changed-cbc594fd-9253-41f2-ac63-faae62db282f {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2220.622329] env[61964]: DEBUG nova.compute.manager [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Refreshing instance network info cache due to event network-changed-cbc594fd-9253-41f2-ac63-faae62db282f. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2220.622329] env[61964]: DEBUG oslo_concurrency.lockutils [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] Acquiring lock "refresh_cache-4c21315f-18a1-4cc4-b4b7-cfb07b06c379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2220.622329] env[61964]: DEBUG oslo_concurrency.lockutils [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] Acquired lock "refresh_cache-4c21315f-18a1-4cc4-b4b7-cfb07b06c379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2220.622832] env[61964]: DEBUG nova.network.neutron [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Refreshing network info cache for port cbc594fd-9253-41f2-ac63-faae62db282f {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2220.874478] env[61964]: DEBUG nova.network.neutron [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Updated VIF entry in instance network info cache for port 29af5994-d39f-4e1f-a787-5b27be8d7679. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2220.874836] env[61964]: DEBUG nova.network.neutron [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Updating instance_info_cache with network_info: [{"id": "29af5994-d39f-4e1f-a787-5b27be8d7679", "address": "fa:16:3e:8b:f6:c7", "network": {"id": "2314ecbe-7750-4d93-9dfa-c2980f17cde9", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-790985375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6407a885683a469b9696b99a724ad93a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29af5994-d3", "ovs_interfaceid": "29af5994-d39f-4e1f-a787-5b27be8d7679", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2220.887482] env[61964]: DEBUG oslo_concurrency.lockutils [req-a8b0d428-223e-4769-a2d7-7aa01bbc574f req-d6256f7b-8813-4072-b2d7-819a01b9c5ec service nova] Releasing lock "refresh_cache-bae8f345-41e7-4adb-a44c-d91347fb4c7a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2221.120290] env[61964]: DEBUG nova.network.neutron [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Updated VIF entry in instance network info cache for port cbc594fd-9253-41f2-ac63-faae62db282f. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2221.120664] env[61964]: DEBUG nova.network.neutron [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Updating instance_info_cache with network_info: [{"id": "cbc594fd-9253-41f2-ac63-faae62db282f", "address": "fa:16:3e:71:7d:32", "network": {"id": "3852b29f-f2c2-4604-b229-1d5952e819eb", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1568272893-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c129e8a559b4264927ff0d2510aa439", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a92a4ffe-7939-4697-bf98-5b22e2c7feda", "external-id": "nsx-vlan-transportzone-732", "segmentation_id": 732, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbc594fd-92", "ovs_interfaceid": "cbc594fd-9253-41f2-ac63-faae62db282f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2221.129665] env[61964]: DEBUG oslo_concurrency.lockutils [req-d944b924-f593-4164-b0b8-a2739654e180 req-49c05ece-f787-4256-927d-1db776b68a58 service nova] Releasing lock "refresh_cache-4c21315f-18a1-4cc4-b4b7-cfb07b06c379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2225.189429] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2225.189820] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2233.067731] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.068087] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2233.068087] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list 
of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2233.089824] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.090075] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.090257] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.090431] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.090588] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.090735] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 63911858-5a79-4479-8c92-46afca980300] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.091009] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.091200] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.091367] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.091517] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2233.091662] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2233.092296] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2233.384331] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.383957] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.383957] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2240.384565] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2241.383761] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2242.384361] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2243.380582] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.383753] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.396307] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2246.396515] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2246.396680] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2246.396835] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2246.398034] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb27858d-204c-463e-a2a4-ae074ba297a6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.407089] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a408ca6-6255-4495-a32c-03f6708333a7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.420938] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c779e17-c98e-4d87-84f0-3a1dadebf860 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.427336] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85fa957-fc24-43da-b14e-bcf5b730acbc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.458159] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181371MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2246.458307] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2246.458536] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2246.527952] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.528252] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.528435] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.528566] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.528694] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.528810] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.528925] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.529121] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.529253] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.529369] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2246.541712] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance d0cd87ae-53f6-4b03-9b49-b84b34cea243 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.552315] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance abb43b76-b3df-4d4a-b1f4-801306d0f01f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.562219] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.572065] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 2f54f015-36dd-4390-a8a3-afa767581e44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.582392] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 79a8f962-d252-4895-9ac3-a2e214fc7d82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.591558] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 97aa9519-8986-49b9-a5d3-5a24968c709b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.601540] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 469107ae-93b5-467d-94d5-d9c78766a934 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.611052] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f5589fbe-df43-4407-b63a-5e4f96021b61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.619547] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2246.619832] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2246.619996] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2246.817320] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5a41ad-2450-41bf-bc17-4214c881820c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.824757] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7f6059-be77-448c-9f24-8e18b0d3b881 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.853820] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e669a93-a366-4c1b-bf68-cfaa49e99fc8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.860716] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a220c545-1498-41cf-bcab-8b8e42c70057 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.874238] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2246.883034] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2246.898333] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2246.898510] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.440s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2247.894652] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.127712] env[61964]: DEBUG oslo_concurrency.lockutils [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2252.197616] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "9ae01818-da08-4137-97c0-bc4c57759d46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2252.770739] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "031156ba-251e-4b8b-86bd-9c967adc808f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2252.771036] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "031156ba-251e-4b8b-86bd-9c967adc808f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2257.400220] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
2259.297143] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2259.297426] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2262.726275] env[61964]: WARNING oslo_vmware.rw_handles [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2262.726275] env[61964]: ERROR oslo_vmware.rw_handles [ 2262.726900] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2262.728415] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2262.728658] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/8855a9a3-0723-4c56-929c-d4a8601c21dd/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2262.728937] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b16d7009-7eda-4554-a92a-d26a6a88dca7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.736293] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Waiting for the task: (returnval){ [ 2262.736293] env[61964]: value = "task-1688708" [ 2262.736293] env[61964]: _type = "Task" [ 2262.736293] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.744080] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Task: {'id': task-1688708, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.247734] env[61964]: DEBUG oslo_vmware.exceptions [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2263.248032] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2263.248573] env[61964]: ERROR nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2263.248573] env[61964]: Faults: ['InvalidArgument'] [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Traceback (most recent call last): [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] yield resources [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self.driver.spawn(context, instance, image_meta, [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self._fetch_image_if_missing(context, vi) [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] image_cache(vi, tmp_image_ds_loc) [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] vm_util.copy_virtual_disk( [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] session._wait_for_task(vmdk_copy_task) [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] return self.wait_for_task(task_ref) [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] return evt.wait() [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] result = hub.switch() [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] return self.greenlet.switch() [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self.f(*self.args, **self.kw) [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] raise exceptions.translate_fault(task_info.error) [ 
2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Faults: ['InvalidArgument'] [ 2263.248573] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] [ 2263.249563] env[61964]: INFO nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Terminating instance [ 2263.251023] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2263.251023] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2263.251023] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61862bf7-5456-4f2a-bf3d-cd97ab6ac295 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.253067] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2263.253264] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2263.253967] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f071cdf-9efe-46b2-8439-9f9c70987434 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.261066] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2263.261954] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d11203ad-d98a-4e61-a6fe-1ee23fb0d791 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.263346] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2263.263510] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2263.264184] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bec96a9-0160-4704-af00-603a31cb8c12 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.269623] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Waiting for the task: (returnval){ [ 2263.269623] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52fe58b7-d2d1-6ceb-a681-23cd8b89e929" [ 2263.269623] env[61964]: _type = "Task" [ 2263.269623] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.277871] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52fe58b7-d2d1-6ceb-a681-23cd8b89e929, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.340046] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2263.340046] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2263.340238] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Deleting the datastore file [datastore1] c73b38c1-53d0-4c98-814f-b6b8984bbaf5 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2263.340574] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f57247e7-3aae-4af4-960b-9df9b90cc427 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.346991] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Waiting for the task: (returnval){ [ 2263.346991] env[61964]: value = "task-1688710" [ 2263.346991] env[61964]: _type = "Task" [ 2263.346991] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.354580] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Task: {'id': task-1688710, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.780318] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2263.780601] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Creating directory with path [datastore1] vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2263.780873] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b0ad4ce-f2cd-42e3-b4de-a8e5113ee6d2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.791880] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Created directory with path [datastore1] vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2263.792121] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Fetch image to [datastore1] vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2263.792280] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2263.793014] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2ea1ef-ade7-4308-a75f-3d545f4a5b50 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.799583] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a2b503-ccc0-481c-a53a-ca4659e9195a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.808383] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede14a0d-5ae6-41c4-8222-dc14d4f30971 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.838799] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-aa090112-a310-419c-b0ac-33528f9d039d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.843835] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e2e2ccaa-b6a0-4599-a249-8bb300f1ed7d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.855554] env[61964]: DEBUG oslo_vmware.api [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Task: {'id': task-1688710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067652} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.855802] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2263.855996] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2263.856185] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2263.856350] env[61964]: INFO nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Took 0.60 seconds to destroy the instance on the hypervisor. 
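The records above and below show the vmwareapi driver issuing vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) and polling them through oslo_vmware.api until they either complete or surface a translated fault such as the InvalidArgument error in the traceback. The following is only a rough sketch of that wait/poll/raise cycle: the dict-shaped task info, TaskFailed, and the fixed polling interval are illustrative simplifications, not the oslo.vmware API.

    import time

    class TaskFailed(Exception):
        """Stand-in for the translated fault raised when a polled task errors out."""

    def wait_for_task(poll_task_info, interval=0.5, timeout=60.0):
        """Poll a vCenter-style task until it finishes.

        poll_task_info: callable returning a dict such as
            {'state': 'running' | 'success' | 'error',
             'progress': int, 'result': object, 'error': str}
        (a simplification of the task info object a real session exposes).
        """
        deadline = time.monotonic() + timeout
        while True:
            info = poll_task_info()
            if info['state'] == 'success':
                # e.g. duration_secs / result payload, as in the completed
                # DeleteDatastoreFile_Task record above.
                return info.get('result')
            if info['state'] == 'error':
                # Mirrors the "raise exceptions.translate_fault(task_info.error)"
                # frame in the CopyVirtualDisk_Task traceback.
                raise TaskFailed(info.get('error'))
            if time.monotonic() > deadline:
                raise TimeoutError('task did not complete within %ss' % timeout)
            # Corresponds to the "... progress is N%" lines emitted while waiting.
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)

Similarly, the "Acquiring lock", "acquired ... waited", and "released ... held" lines that bracket compute_resources, refresh_cache-<uuid>, and the image-cache VMDK path come from oslo_concurrency.lockutils. A minimal sketch of the two lockutils idioms that produce such lines, assuming oslo.concurrency is installed; the function names and bodies below are placeholders, not Nova's implementation.

    from oslo_concurrency import lockutils

    # Decorator form: every call is serialized on the named semaphore, and
    # lockutils logs the acquire/release bookkeeping seen throughout this trace.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # ... audit hypervisor resources and report inventory to placement ...
        pass

    # Context-manager form, as used around per-instance cache refreshes.
    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # ... rebuild the instance_info_cache from Neutron data ...
            pass

With the default in-process semaphores, a "waited 0.000s" line simply means no other green thread held that lock name at the moment of acquisition.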
[ 2263.858375] env[61964]: DEBUG nova.compute.claims [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2263.858544] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2263.858754] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2263.863251] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2263.917943] env[61964]: DEBUG oslo_vmware.rw_handles [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2263.979921] env[61964]: DEBUG oslo_vmware.rw_handles [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2263.980161] env[61964]: DEBUG oslo_vmware.rw_handles [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2264.206115] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1ec1e9-1453-4be5-932d-fe3aa7c89732 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.213900] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706fa573-9907-4d30-bd12-8d1151e5fc0d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.243210] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ee77d3-6f49-46cc-815f-24c234cfabda {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.250549] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76c6816-416d-4eb3-84c6-3635f93fb05c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.263463] env[61964]: DEBUG nova.compute.provider_tree [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2264.272232] env[61964]: DEBUG nova.scheduler.client.report [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2264.285899] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.427s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2264.286431] env[61964]: ERROR nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2264.286431] env[61964]: Faults: ['InvalidArgument'] [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Traceback (most recent call last): [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2264.286431] env[61964]: ERROR 
nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self.driver.spawn(context, instance, image_meta, [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self._fetch_image_if_missing(context, vi) [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] image_cache(vi, tmp_image_ds_loc) [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] vm_util.copy_virtual_disk( [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] session._wait_for_task(vmdk_copy_task) [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] return self.wait_for_task(task_ref) [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] return evt.wait() [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] result = hub.switch() [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] return self.greenlet.switch() [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] self.f(*self.args, **self.kw) [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] raise exceptions.translate_fault(task_info.error) [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Faults: ['InvalidArgument'] [ 2264.286431] env[61964]: ERROR nova.compute.manager [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] [ 2264.287342] env[61964]: DEBUG nova.compute.utils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2264.288811] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Build of instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 was re-scheduled: A specified parameter was not correct: fileType [ 2264.288811] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2264.288934] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2264.289024] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2264.289939] env[61964]: DEBUG nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2264.289939] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2264.772220] env[61964]: DEBUG nova.network.neutron [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2264.783730] env[61964]: INFO nova.compute.manager [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Took 0.49 seconds to deallocate network for instance. [ 2264.875634] env[61964]: INFO nova.scheduler.client.report [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Deleted allocations for instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 [ 2264.895771] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7047ac15-1ecf-4fe6-b246-c733e50b8664 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.136s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2264.896941] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.026s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2264.897201] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Acquiring lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2264.897411] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2264.897580] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2264.899541] env[61964]: INFO nova.compute.manager [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Terminating instance [ 2264.901173] env[61964]: DEBUG nova.compute.manager [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2264.901362] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2264.901878] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4291399-11c5-4f42-912b-a861b75628bd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.911947] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0982ade5-cd57-43bb-b40a-a37003ae4fd2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.922936] env[61964]: DEBUG nova.compute.manager [None req-2387dcc4-1262-4af9-bd94-f9bcfb5dec8e tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: d0cd87ae-53f6-4b03-9b49-b84b34cea243] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2264.942928] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c73b38c1-53d0-4c98-814f-b6b8984bbaf5 could not be found. [ 2264.943175] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2264.943356] env[61964]: INFO nova.compute.manager [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2264.943604] env[61964]: DEBUG oslo.service.loopingcall [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2264.943848] env[61964]: DEBUG nova.compute.manager [-] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2264.943945] env[61964]: DEBUG nova.network.neutron [-] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2264.950100] env[61964]: DEBUG nova.compute.manager [None req-2387dcc4-1262-4af9-bd94-f9bcfb5dec8e tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: d0cd87ae-53f6-4b03-9b49-b84b34cea243] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2264.975096] env[61964]: DEBUG nova.network.neutron [-] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2264.980447] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2387dcc4-1262-4af9-bd94-f9bcfb5dec8e tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "d0cd87ae-53f6-4b03-9b49-b84b34cea243" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.043s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2264.982150] env[61964]: INFO nova.compute.manager [-] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] Took 0.04 seconds to deallocate network for instance. [ 2264.990941] env[61964]: DEBUG nova.compute.manager [None req-251dab60-8ba6-436b-87fa-8f09ceef3bad tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: abb43b76-b3df-4d4a-b1f4-801306d0f01f] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2265.016348] env[61964]: DEBUG nova.compute.manager [None req-251dab60-8ba6-436b-87fa-8f09ceef3bad tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: abb43b76-b3df-4d4a-b1f4-801306d0f01f] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2265.039129] env[61964]: DEBUG oslo_concurrency.lockutils [None req-251dab60-8ba6-436b-87fa-8f09ceef3bad tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "abb43b76-b3df-4d4a-b1f4-801306d0f01f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 195.766s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2265.050020] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2265.086878] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2b94ae7a-f336-4e88-ab00-7a46f0433a11 tempest-ServerRescueTestJSON-1613959985 tempest-ServerRescueTestJSON-1613959985-project-member] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2265.088336] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 235.485s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2265.088336] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c73b38c1-53d0-4c98-814f-b6b8984bbaf5] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2265.088336] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "c73b38c1-53d0-4c98-814f-b6b8984bbaf5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2265.107511] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2265.107758] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2265.109274] env[61964]: INFO nova.compute.claims [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2265.345772] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a2e575-0bc3-44eb-ac73-df1d92c8b4d3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.353191] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a1d279-a003-4d1e-84a5-4220c5c2e30b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.386055] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "7d1977c2-cc88-4964-989a-9258f345c4f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2265.386055] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2265.386730] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c66715-704c-4361-87e7-63d7523fe3aa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.394444] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada0519a-4036-470c-8b82-2e59b951945c {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.408411] env[61964]: DEBUG nova.compute.provider_tree [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2265.416797] env[61964]: DEBUG nova.scheduler.client.report [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2265.440767] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.333s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2265.441280] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2265.483189] env[61964]: DEBUG nova.compute.utils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2265.484519] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2265.484685] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2265.495652] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2265.559502] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2265.563287] env[61964]: DEBUG nova.policy [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4206e3d8cc247299ece6857d4cf4cea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba01d58dd1074757ab694610eec59245', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2265.586568] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2265.586804] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2265.586964] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2265.587158] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2265.587542] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2265.587738] 
env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2265.588043] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2265.588189] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2265.588399] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2265.588657] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2265.588955] env[61964]: DEBUG nova.virt.hardware [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2265.589835] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45973689-579a-4525-927c-2f33d4d166c5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.599888] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768242cd-725d-4aba-9fa5-2f19a25595bd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.885063] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Successfully created port: 5168ff03-0acb-4fcc-bddb-5500e38df0aa {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2266.597979] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Successfully updated port: 5168ff03-0acb-4fcc-bddb-5500e38df0aa {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2266.612454] env[61964]: DEBUG oslo_concurrency.lockutils [None 
req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "refresh_cache-aacff339-acaa-481d-930f-a4e838525cc2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2266.612618] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquired lock "refresh_cache-aacff339-acaa-481d-930f-a4e838525cc2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2266.612772] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2266.662034] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2266.821346] env[61964]: DEBUG nova.compute.manager [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Received event network-vif-plugged-5168ff03-0acb-4fcc-bddb-5500e38df0aa {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2266.821574] env[61964]: DEBUG oslo_concurrency.lockutils [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] Acquiring lock "aacff339-acaa-481d-930f-a4e838525cc2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2266.821859] env[61964]: DEBUG oslo_concurrency.lockutils [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] Lock "aacff339-acaa-481d-930f-a4e838525cc2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2266.821913] env[61964]: DEBUG oslo_concurrency.lockutils [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] Lock "aacff339-acaa-481d-930f-a4e838525cc2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2266.822176] env[61964]: DEBUG nova.compute.manager [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] No waiting events found dispatching network-vif-plugged-5168ff03-0acb-4fcc-bddb-5500e38df0aa {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2266.822322] env[61964]: WARNING nova.compute.manager [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service 
nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Received unexpected event network-vif-plugged-5168ff03-0acb-4fcc-bddb-5500e38df0aa for instance with vm_state building and task_state spawning. [ 2266.822483] env[61964]: DEBUG nova.compute.manager [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Received event network-changed-5168ff03-0acb-4fcc-bddb-5500e38df0aa {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2266.822634] env[61964]: DEBUG nova.compute.manager [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Refreshing instance network info cache due to event network-changed-5168ff03-0acb-4fcc-bddb-5500e38df0aa. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2266.822797] env[61964]: DEBUG oslo_concurrency.lockutils [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] Acquiring lock "refresh_cache-aacff339-acaa-481d-930f-a4e838525cc2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2266.883687] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Updating instance_info_cache with network_info: [{"id": "5168ff03-0acb-4fcc-bddb-5500e38df0aa", "address": "fa:16:3e:99:20:fb", "network": {"id": "c9b69147-cbd3-4b2e-842a-97d6781e91b5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-922459386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba01d58dd1074757ab694610eec59245", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5168ff03-0a", "ovs_interfaceid": "5168ff03-0acb-4fcc-bddb-5500e38df0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2266.898484] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Releasing lock "refresh_cache-aacff339-acaa-481d-930f-a4e838525cc2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2266.898773] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Instance network_info: |[{"id": 
"5168ff03-0acb-4fcc-bddb-5500e38df0aa", "address": "fa:16:3e:99:20:fb", "network": {"id": "c9b69147-cbd3-4b2e-842a-97d6781e91b5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-922459386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba01d58dd1074757ab694610eec59245", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5168ff03-0a", "ovs_interfaceid": "5168ff03-0acb-4fcc-bddb-5500e38df0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2266.899079] env[61964]: DEBUG oslo_concurrency.lockutils [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] Acquired lock "refresh_cache-aacff339-acaa-481d-930f-a4e838525cc2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2266.899256] env[61964]: DEBUG nova.network.neutron [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Refreshing network info cache for port 5168ff03-0acb-4fcc-bddb-5500e38df0aa {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2266.900340] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:20:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4b43a78-f49b-4132-ab2e-6e28769a9498', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5168ff03-0acb-4fcc-bddb-5500e38df0aa', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2266.908755] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Creating folder: Project (ba01d58dd1074757ab694610eec59245). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2266.911695] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-272b7318-a3db-4a2e-8061-6636ca1a11e8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.922343] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Created folder: Project (ba01d58dd1074757ab694610eec59245) in parent group-v351942. 
[ 2266.922343] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Creating folder: Instances. Parent ref: group-v352022. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2266.922557] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9dae53e-7fb5-4c5f-844e-823eb1ae63f7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.931806] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Created folder: Instances in parent group-v352022. [ 2266.932032] env[61964]: DEBUG oslo.service.loopingcall [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2266.932239] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2266.932441] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fa28925-c8d6-4c8c-8bb5-ec5dcb7b0f85 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.953379] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2266.953379] env[61964]: value = "task-1688713" [ 2266.953379] env[61964]: _type = "Task" [ 2266.953379] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.960335] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688713, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.156148] env[61964]: DEBUG nova.network.neutron [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Updated VIF entry in instance network info cache for port 5168ff03-0acb-4fcc-bddb-5500e38df0aa. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2267.156588] env[61964]: DEBUG nova.network.neutron [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Updating instance_info_cache with network_info: [{"id": "5168ff03-0acb-4fcc-bddb-5500e38df0aa", "address": "fa:16:3e:99:20:fb", "network": {"id": "c9b69147-cbd3-4b2e-842a-97d6781e91b5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-922459386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba01d58dd1074757ab694610eec59245", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4b43a78-f49b-4132-ab2e-6e28769a9498", "external-id": "nsx-vlan-transportzone-737", "segmentation_id": 737, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5168ff03-0a", "ovs_interfaceid": "5168ff03-0acb-4fcc-bddb-5500e38df0aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2267.166281] env[61964]: DEBUG oslo_concurrency.lockutils [req-52118a29-7c4e-4f39-ac35-e2d1043c74d4 req-be0122e5-5710-45ab-b0df-763cba513047 service nova] Releasing lock "refresh_cache-aacff339-acaa-481d-930f-a4e838525cc2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2267.463059] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688713, 'name': CreateVM_Task, 'duration_secs': 0.272891} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.463214] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2267.463885] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2267.465049] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2267.465049] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2267.465049] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91cf7b3c-4b20-425f-a817-a2645b5bd6a0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.468944] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Waiting for the task: (returnval){ [ 2267.468944] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5231f967-47e6-c683-c275-a3c1b34afe6e" [ 2267.468944] env[61964]: _type = "Task" [ 2267.468944] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.476466] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5231f967-47e6-c683-c275-a3c1b34afe6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.979390] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2267.979390] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2267.979390] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2289.778929] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "aacff339-acaa-481d-930f-a4e838525cc2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2292.383631] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.384622] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.384926] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2293.384926] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2293.406674] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 63911858-5a79-4479-8c92-46afca980300] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2293.407955] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2294.383705] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2298.383846] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2298.384195] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61964) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2299.393917] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2299.394276] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2300.384507] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2301.384642] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2304.380446] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2304.383165] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.385448] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.396579] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2306.396812] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: 
waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2306.396978] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2306.397150] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2306.398365] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bddc2b-4f3f-486c-a12b-722da3e31325 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.407122] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d139f4d9-93c7-4784-b249-c80ed3b5c07c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.420646] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fd604e-6bd7-4df2-9a1a-386f30ca094b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.426845] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c03327a-d09c-4494-979f-2237362225c0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.456412] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2306.456606] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2306.456776] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2306.569123] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 66787186-e8c6-4700-9caf-bd7e7970b65d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.569302] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.569432] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.569627] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.569832] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.570024] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.570168] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.570289] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.570408] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.570542] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2306.583054] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 469107ae-93b5-467d-94d5-d9c78766a934 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2306.594733] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f5589fbe-df43-4407-b63a-5e4f96021b61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2306.606024] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2306.616862] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2306.627168] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2306.637514] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2306.637755] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2306.637902] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2306.653919] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2306.669476] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2306.669687] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2306.681167] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2306.699500] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2306.879075] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8130a0e2-675f-4051-8877-0d3084168844 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.886644] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-53269461-c9d7-4948-863a-d6d498ac0f62 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.917577] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c9d702-de20-4d83-a33b-b63a924f8919 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.925393] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827cfd33-e075-47a8-a379-791a6c1df0c0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.939478] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2306.948745] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2306.963520] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2306.963718] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.507s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2306.963948] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.964105] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2306.976707] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] There are 1 instances to clean {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2306.977010] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c7caa492-efdd-462a-9bc3-9f19d6b0f7a8] Instance has had 0 of 5 cleanup attempts {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11211}} [ 2310.384685] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens 
{{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2310.840975] env[61964]: WARNING oslo_vmware.rw_handles [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2310.840975] env[61964]: ERROR oslo_vmware.rw_handles [ 2310.841498] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2310.843453] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2310.843764] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Copying Virtual Disk [datastore1] vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/20fe0710-2c52-45cd-88aa-42c028f3c2ba/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2310.844074] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fa1fe36-a230-470e-a713-65086e46b813 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.852276] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 
tempest-AttachInterfacesV270Test-313255921-project-member] Waiting for the task: (returnval){ [ 2310.852276] env[61964]: value = "task-1688714" [ 2310.852276] env[61964]: _type = "Task" [ 2310.852276] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.860062] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Task: {'id': task-1688714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.362222] env[61964]: DEBUG oslo_vmware.exceptions [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2311.362499] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2311.363075] env[61964]: ERROR nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.363075] env[61964]: Faults: ['InvalidArgument'] [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Traceback (most recent call last): [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] yield resources [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self.driver.spawn(context, instance, image_meta, [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self._fetch_image_if_missing(context, vi) [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] image_cache(vi, tmp_image_ds_loc) [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] vm_util.copy_virtual_disk( [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] session._wait_for_task(vmdk_copy_task) [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] return self.wait_for_task(task_ref) [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] return evt.wait() [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] result = hub.switch() [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] return self.greenlet.switch() [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self.f(*self.args, **self.kw) [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] raise exceptions.translate_fault(task_info.error) [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Faults: ['InvalidArgument'] [ 2311.363075] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] [ 2311.364043] env[61964]: INFO nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Terminating instance [ 2311.365344] env[61964]: 
DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2311.365561] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.365800] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebeacee5-d5c6-463e-a69d-16704b13c7d7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.367955] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2311.368159] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2311.368886] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb16590a-857c-49db-9a8f-90bb67f3e64b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.375461] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2311.375669] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cccc260d-507c-409e-b9d8-693b9bbaa0da {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.377760] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.377934] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2311.378859] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebc4e5ae-795c-4cf6-b5f4-0ae172d1bef2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.383506] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Waiting for the task: (returnval){ [ 2311.383506] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a3e105-e725-3737-0a69-c65d519695cd" [ 2311.383506] env[61964]: _type = "Task" [ 2311.383506] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.393854] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a3e105-e725-3737-0a69-c65d519695cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.453444] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2311.453722] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2311.453924] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Deleting the datastore file [datastore1] 66787186-e8c6-4700-9caf-bd7e7970b65d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2311.454212] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ff5065a-b5a3-4c37-863b-c59d7a3fa621 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.460272] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Waiting for the task: (returnval){ [ 2311.460272] env[61964]: value = "task-1688716" [ 2311.460272] env[61964]: _type = "Task" [ 2311.460272] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.468129] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Task: {'id': task-1688716, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.894150] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2311.894406] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Creating directory with path [datastore1] vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.894631] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b116a291-ab40-48a4-bacc-d2f1587494e8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.909656] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Created directory with path [datastore1] vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.909836] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Fetch image to [datastore1] vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2311.910010] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2311.910720] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c51c64-712d-448d-9926-865f4c021a64 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.916682] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa9947f-6098-4ca2-97bc-de4ce476b59b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.925382] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d83b9e-eb6d-4027-b1a9-618f703284cd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.959446] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-261876d8-452c-4c03-9e4b-d2ff1028c177 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.969705] env[61964]: DEBUG oslo_vmware.api [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Task: {'id': task-1688716, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079291} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.970877] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2311.971080] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2311.971257] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2311.971427] env[61964]: INFO nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Took 0.60 seconds to destroy the instance on the hypervisor. 
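The DeleteDatastoreFile_Task entries above follow the same submit-then-poll pattern as the earlier CopyVirtualDisk_Task: a vCenter task object is created, its progress is polled ("progress is 0%"), and the call either returns on success ("completed successfully ... duration_secs") or raises the translated fault (the InvalidArgument/fileType failure seen earlier). The following is only a rough, stdlib-only sketch of that poll-until-done loop; poll_task_status, TaskFailed, and the returned dict shape are illustrative assumptions, not the real oslo.vmware API.

import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state (cf. translate_fault)."""


def wait_for_task(poll_task_status, task_id, interval=0.5, timeout=300.0):
    """Poll poll_task_status(task_id) until it reports success or error.

    poll_task_status is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str|None},
    mirroring the "progress is N%" / "completed successfully" lines in the log.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task_status(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # In the log, this is where the CopyVirtualDisk fault surfaces
            # and propagates up into _build_and_run_instance.
            raise TaskFailed(info.get('error') or 'task failed')
        time.sleep(interval)
    raise TimeoutError('task %s did not complete within %ss' % (task_id, timeout))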
[ 2311.973172] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4e7ea8dd-f420-4c92-b0d5-38367a1f1ff4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.975094] env[61964]: DEBUG nova.compute.claims [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2311.975267] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2311.975479] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2311.998026] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2312.052233] env[61964]: DEBUG oslo_vmware.rw_handles [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2312.112198] env[61964]: DEBUG oslo_vmware.rw_handles [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2312.112393] env[61964]: DEBUG oslo_vmware.rw_handles [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2312.257433] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6060a707-a577-4ec2-982d-ce12198877ff {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.265750] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0487cb1e-8412-41cd-8310-450eba2da0ca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.295963] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8eb731-c90a-4cf4-8d7c-bf71e4dbbdf4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.303166] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e16e95-f4e3-4ea8-9f52-499ec0ceea40 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.316608] env[61964]: DEBUG nova.compute.provider_tree [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2312.324951] env[61964]: DEBUG nova.scheduler.client.report [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2312.338115] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2312.338579] env[61964]: ERROR nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2312.338579] env[61964]: Faults: ['InvalidArgument'] [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Traceback (most recent call last): [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2312.338579] 
env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self.driver.spawn(context, instance, image_meta, [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self._fetch_image_if_missing(context, vi) [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] image_cache(vi, tmp_image_ds_loc) [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] vm_util.copy_virtual_disk( [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] session._wait_for_task(vmdk_copy_task) [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] return self.wait_for_task(task_ref) [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] return evt.wait() [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] result = hub.switch() [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] return self.greenlet.switch() [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] self.f(*self.args, **self.kw) [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] raise exceptions.translate_fault(task_info.error) [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Faults: ['InvalidArgument'] [ 2312.338579] env[61964]: ERROR nova.compute.manager [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] [ 2312.339493] env[61964]: DEBUG nova.compute.utils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2312.340593] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Build of instance 66787186-e8c6-4700-9caf-bd7e7970b65d was re-scheduled: A specified parameter was not correct: fileType [ 2312.340593] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2312.340972] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2312.341156] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2312.341323] env[61964]: DEBUG nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2312.341480] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2312.803413] env[61964]: DEBUG nova.network.neutron [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.820018] env[61964]: INFO nova.compute.manager [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Took 0.48 seconds to deallocate network for instance. [ 2312.921641] env[61964]: INFO nova.scheduler.client.report [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Deleted allocations for instance 66787186-e8c6-4700-9caf-bd7e7970b65d [ 2312.956479] env[61964]: DEBUG oslo_concurrency.lockutils [None req-26797993-4eec-4536-916e-572f5a92cece tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.577s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2312.957776] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.834s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2312.958009] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Acquiring lock "66787186-e8c6-4700-9caf-bd7e7970b65d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2312.958231] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2312.958398] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2312.960432] env[61964]: INFO nova.compute.manager [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Terminating instance [ 2312.962042] env[61964]: DEBUG nova.compute.manager [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2312.962244] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2312.962731] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c11c63b7-e0a5-467d-968a-4c7adfb0adb5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.974719] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040bf359-33b3-42c5-9cf2-d29dd7c3adeb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.989079] env[61964]: DEBUG nova.compute.manager [None req-3535e3e5-91a2-49fc-845e-24c4ecf2139d tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: 2f54f015-36dd-4390-a8a3-afa767581e44] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2313.007203] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66787186-e8c6-4700-9caf-bd7e7970b65d could not be found. 
[ 2313.007488] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2313.007682] env[61964]: INFO nova.compute.manager [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2313.007950] env[61964]: DEBUG oslo.service.loopingcall [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2313.008204] env[61964]: DEBUG nova.compute.manager [-] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2313.008316] env[61964]: DEBUG nova.network.neutron [-] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2313.014214] env[61964]: DEBUG nova.compute.manager [None req-3535e3e5-91a2-49fc-845e-24c4ecf2139d tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: 2f54f015-36dd-4390-a8a3-afa767581e44] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2313.035916] env[61964]: DEBUG nova.network.neutron [-] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2313.038896] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3535e3e5-91a2-49fc-845e-24c4ecf2139d tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "2f54f015-36dd-4390-a8a3-afa767581e44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.185s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.044249] env[61964]: INFO nova.compute.manager [-] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] Took 0.04 seconds to deallocate network for instance. [ 2313.050662] env[61964]: DEBUG nova.compute.manager [None req-52abaf69-9807-4d95-a935-c5e527c36741 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: 79a8f962-d252-4895-9ac3-a2e214fc7d82] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2313.073064] env[61964]: DEBUG nova.compute.manager [None req-52abaf69-9807-4d95-a935-c5e527c36741 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: 79a8f962-d252-4895-9ac3-a2e214fc7d82] Instance disappeared before build. 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2313.094319] env[61964]: DEBUG oslo_concurrency.lockutils [None req-52abaf69-9807-4d95-a935-c5e527c36741 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "79a8f962-d252-4895-9ac3-a2e214fc7d82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.972s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.103247] env[61964]: DEBUG nova.compute.manager [None req-8cae8e77-203d-447b-a354-728dafa9c0c1 tempest-ServerShowV257Test-31655054 tempest-ServerShowV257Test-31655054-project-member] [instance: 97aa9519-8986-49b9-a5d3-5a24968c709b] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2313.138506] env[61964]: DEBUG nova.compute.manager [None req-8cae8e77-203d-447b-a354-728dafa9c0c1 tempest-ServerShowV257Test-31655054 tempest-ServerShowV257Test-31655054-project-member] [instance: 97aa9519-8986-49b9-a5d3-5a24968c709b] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2313.155104] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8f4df82a-43a3-4ca7-9e8d-232319f001d0 tempest-AttachInterfacesV270Test-313255921 tempest-AttachInterfacesV270Test-313255921-project-member] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.155643] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 283.552s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2313.155829] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 66787186-e8c6-4700-9caf-bd7e7970b65d] During sync_power_state the instance has a pending task (deleting). Skip. [ 2313.155999] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "66787186-e8c6-4700-9caf-bd7e7970b65d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.163486] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8cae8e77-203d-447b-a354-728dafa9c0c1 tempest-ServerShowV257Test-31655054 tempest-ServerShowV257Test-31655054-project-member] Lock "97aa9519-8986-49b9-a5d3-5a24968c709b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.811s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.172881] env[61964]: DEBUG nova.compute.manager [None req-33cd8891-046f-4b6f-ac1c-c4b6356c89fc tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] [instance: 469107ae-93b5-467d-94d5-d9c78766a934] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2313.197067] env[61964]: DEBUG nova.compute.manager [None req-33cd8891-046f-4b6f-ac1c-c4b6356c89fc tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] [instance: 469107ae-93b5-467d-94d5-d9c78766a934] Instance disappeared before build. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 2313.219735] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33cd8891-046f-4b6f-ac1c-c4b6356c89fc tempest-AttachVolumeNegativeTest-804760550 tempest-AttachVolumeNegativeTest-804760550-project-member] Lock "469107ae-93b5-467d-94d5-d9c78766a934" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 195.908s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.229880] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2313.293978] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2313.294258] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2313.296157] env[61964]: INFO nova.compute.claims [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2313.536892] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f4f495-44b4-49c8-8f7e-f0fac131be70 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.544091] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a40d70-ef53-4ec4-be4b-d54b75372971 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.573658] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8ccc4c-92b8-4bba-9936-1edd8f183261 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.580350] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60418743-ca3e-4807-9def-0ab830a08c3f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.594079] 
env[61964]: DEBUG nova.compute.provider_tree [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2313.605517] env[61964]: DEBUG nova.scheduler.client.report [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2313.621453] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.636811] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "2ed78fb4-1ac4-4bc5-bf9a-574c6ad7e824" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2313.636811] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "2ed78fb4-1ac4-4bc5-bf9a-574c6ad7e824" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2313.642512] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "2ed78fb4-1ac4-4bc5-bf9a-574c6ad7e824" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.006s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2313.643071] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2313.680097] env[61964]: DEBUG nova.compute.utils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2313.681476] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2313.681647] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2313.692665] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2313.759597] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2313.785500] env[61964]: DEBUG nova.policy [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '236eae1e31b9432bb1c2f6d55da6ffd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21c3faa57ac34f93b0664a9e2992ebca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2313.789182] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow 
threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2313.789452] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2313.789652] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2313.789877] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2313.790067] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2313.790257] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2313.790512] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2313.790722] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2313.790952] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2313.791172] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2313.791382] env[61964]: DEBUG nova.virt.hardware [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2313.792513] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bcc9f0-8844-4bc6-89a1-28576a5061f1 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.801160] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe07b05-5445-4f0c-9e44-e8a717033270 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.282022] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Successfully created port: 8beea16a-5f9a-44f5-b3d5-50242f2109e9 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2315.463139] env[61964]: DEBUG nova.compute.manager [req-f8d90213-a648-4096-b234-75c5337191c8 req-e69668fc-8ec5-4589-b6b5-9aff1d8ff096 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Received event network-vif-plugged-8beea16a-5f9a-44f5-b3d5-50242f2109e9 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2315.463408] env[61964]: DEBUG oslo_concurrency.lockutils [req-f8d90213-a648-4096-b234-75c5337191c8 req-e69668fc-8ec5-4589-b6b5-9aff1d8ff096 service nova] Acquiring lock "f5589fbe-df43-4407-b63a-5e4f96021b61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2315.463602] env[61964]: DEBUG oslo_concurrency.lockutils [req-f8d90213-a648-4096-b234-75c5337191c8 req-e69668fc-8ec5-4589-b6b5-9aff1d8ff096 service nova] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2315.463810] env[61964]: DEBUG oslo_concurrency.lockutils [req-f8d90213-a648-4096-b234-75c5337191c8 req-e69668fc-8ec5-4589-b6b5-9aff1d8ff096 service nova] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2315.464016] env[61964]: DEBUG nova.compute.manager [req-f8d90213-a648-4096-b234-75c5337191c8 req-e69668fc-8ec5-4589-b6b5-9aff1d8ff096 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] No waiting events found dispatching network-vif-plugged-8beea16a-5f9a-44f5-b3d5-50242f2109e9 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2315.464215] env[61964]: WARNING nova.compute.manager [req-f8d90213-a648-4096-b234-75c5337191c8 req-e69668fc-8ec5-4589-b6b5-9aff1d8ff096 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Received unexpected event network-vif-plugged-8beea16a-5f9a-44f5-b3d5-50242f2109e9 for instance with vm_state building and task_state spawning. 
[ 2315.563143] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Successfully updated port: 8beea16a-5f9a-44f5-b3d5-50242f2109e9 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2315.579819] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "refresh_cache-f5589fbe-df43-4407-b63a-5e4f96021b61" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2315.579963] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquired lock "refresh_cache-f5589fbe-df43-4407-b63a-5e4f96021b61" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2315.580171] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2315.660420] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2315.978783] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Updating instance_info_cache with network_info: [{"id": "8beea16a-5f9a-44f5-b3d5-50242f2109e9", "address": "fa:16:3e:e0:49:40", "network": {"id": "e272e36d-ce7c-43bd-a14f-f0e15169810f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1014913187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21c3faa57ac34f93b0664a9e2992ebca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8beea16a-5f", "ovs_interfaceid": "8beea16a-5f9a-44f5-b3d5-50242f2109e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2315.989567] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 
tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Releasing lock "refresh_cache-f5589fbe-df43-4407-b63a-5e4f96021b61" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2315.989853] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Instance network_info: |[{"id": "8beea16a-5f9a-44f5-b3d5-50242f2109e9", "address": "fa:16:3e:e0:49:40", "network": {"id": "e272e36d-ce7c-43bd-a14f-f0e15169810f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1014913187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21c3faa57ac34f93b0664a9e2992ebca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8beea16a-5f", "ovs_interfaceid": "8beea16a-5f9a-44f5-b3d5-50242f2109e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2315.990243] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:49:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8beea16a-5f9a-44f5-b3d5-50242f2109e9', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2315.998549] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Creating folder: Project (21c3faa57ac34f93b0664a9e2992ebca). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2315.999254] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7efcf211-4236-4f6b-839d-8cae2ddf4bcd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.009365] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Created folder: Project (21c3faa57ac34f93b0664a9e2992ebca) in parent group-v351942. 
[ 2316.009556] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Creating folder: Instances. Parent ref: group-v352025. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2316.009776] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-757eb80a-60a4-421e-9a26-5cf2f44a4dd7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.017792] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Created folder: Instances in parent group-v352025. [ 2316.018077] env[61964]: DEBUG oslo.service.loopingcall [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2316.018269] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2316.018462] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa67d8c1-2443-49fd-a284-bf7fb4fd32e7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.038830] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2316.038830] env[61964]: value = "task-1688719" [ 2316.038830] env[61964]: _type = "Task" [ 2316.038830] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2316.046359] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688719, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2316.551054] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688719, 'name': CreateVM_Task, 'duration_secs': 0.303844} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2316.551054] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2316.551054] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2316.551514] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2316.551514] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2316.551665] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66728252-785e-4005-b605-dab44750e525 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.555956] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Waiting for the task: (returnval){ [ 2316.555956] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b38206-0262-fdfc-cf23-480cf5911553" [ 2316.555956] env[61964]: _type = "Task" [ 2316.555956] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2316.563138] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b38206-0262-fdfc-cf23-480cf5911553, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.066727] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2317.067058] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2317.067323] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2317.490710] env[61964]: DEBUG nova.compute.manager [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Received event network-changed-8beea16a-5f9a-44f5-b3d5-50242f2109e9 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2317.490856] env[61964]: DEBUG nova.compute.manager [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Refreshing instance network info cache due to event network-changed-8beea16a-5f9a-44f5-b3d5-50242f2109e9. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2317.491075] env[61964]: DEBUG oslo_concurrency.lockutils [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] Acquiring lock "refresh_cache-f5589fbe-df43-4407-b63a-5e4f96021b61" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2317.491226] env[61964]: DEBUG oslo_concurrency.lockutils [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] Acquired lock "refresh_cache-f5589fbe-df43-4407-b63a-5e4f96021b61" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2317.491388] env[61964]: DEBUG nova.network.neutron [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Refreshing network info cache for port 8beea16a-5f9a-44f5-b3d5-50242f2109e9 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2317.721505] env[61964]: DEBUG nova.network.neutron [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Updated VIF entry in instance network info cache for port 8beea16a-5f9a-44f5-b3d5-50242f2109e9. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2317.721848] env[61964]: DEBUG nova.network.neutron [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Updating instance_info_cache with network_info: [{"id": "8beea16a-5f9a-44f5-b3d5-50242f2109e9", "address": "fa:16:3e:e0:49:40", "network": {"id": "e272e36d-ce7c-43bd-a14f-f0e15169810f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1014913187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21c3faa57ac34f93b0664a9e2992ebca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8beea16a-5f", "ovs_interfaceid": "8beea16a-5f9a-44f5-b3d5-50242f2109e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2317.730879] env[61964]: DEBUG oslo_concurrency.lockutils [req-a8be4d1c-9260-41ed-9862-1110b10bbdc0 req-25567ff4-1bf0-4ba4-b058-aa58f68b8a01 service nova] Releasing lock "refresh_cache-f5589fbe-df43-4407-b63a-5e4f96021b61" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2353.391761] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.392125] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2353.392125] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2353.413980] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.414174] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.414311] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.414437] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 63911858-5a79-4479-8c92-46afca980300] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.414560] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.414682] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.414802] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.414921] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.415054] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.415177] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2353.415298] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2353.415799] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.384602] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2357.758019] env[61964]: WARNING oslo_vmware.rw_handles [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2357.758019] env[61964]: ERROR oslo_vmware.rw_handles [ 2357.758736] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2357.760353] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2357.760593] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Copying Virtual Disk [datastore1] vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] 
vmware_temp/78a9c6a5-f860-4bdf-a3c7-685381cf6fa8/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2357.760879] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cf9840c-641a-4dc8-a2d8-3ce0fb98e845 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.768471] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Waiting for the task: (returnval){ [ 2357.768471] env[61964]: value = "task-1688720" [ 2357.768471] env[61964]: _type = "Task" [ 2357.768471] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2357.776468] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Task: {'id': task-1688720, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.279332] env[61964]: DEBUG oslo_vmware.exceptions [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2358.279627] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2358.280208] env[61964]: ERROR nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2358.280208] env[61964]: Faults: ['InvalidArgument'] [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Traceback (most recent call last): [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] yield resources [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self.driver.spawn(context, instance, image_meta, [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2358.280208] env[61964]: ERROR nova.compute.manager 
[instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self._fetch_image_if_missing(context, vi) [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] image_cache(vi, tmp_image_ds_loc) [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] vm_util.copy_virtual_disk( [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] session._wait_for_task(vmdk_copy_task) [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] return self.wait_for_task(task_ref) [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] return evt.wait() [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] result = hub.switch() [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] return self.greenlet.switch() [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self.f(*self.args, **self.kw) [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] raise exceptions.translate_fault(task_info.error) [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Faults: ['InvalidArgument'] [ 2358.280208] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] [ 2358.281208] env[61964]: INFO nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Terminating instance [ 2358.282070] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2358.282281] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2358.282522] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a619307-6147-4b8b-8adb-f504f4d6d459 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.284971] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2358.285179] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2358.285891] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d53a97d-ad87-4e9f-8cd7-e41ba4b31292 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.292308] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2358.292511] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f71e809-7de9-4578-8963-124d7e4af553 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.294572] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2358.294740] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2358.295678] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da480b1-abde-42c5-bb75-2f4f54244de2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.301495] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Waiting for the task: (returnval){ [ 2358.301495] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52aa615b-5726-70e0-1935-424c703a6aef" [ 2358.301495] env[61964]: _type = "Task" [ 2358.301495] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.311230] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52aa615b-5726-70e0-1935-424c703a6aef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.381082] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2358.381297] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2358.381514] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Deleting the datastore file [datastore1] 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2358.381813] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-688bc5e2-a7a5-4191-8e4b-6000b0a8a2e6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.388434] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Waiting for the task: (returnval){ [ 2358.388434] env[61964]: value = "task-1688722" [ 2358.388434] env[61964]: _type = "Task" [ 2358.388434] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2358.396209] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Task: {'id': task-1688722, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2358.812555] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2358.812918] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Creating directory with path [datastore1] vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2358.813074] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3400fa3-885f-4763-83e3-ad91108b8ff0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.824458] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Created directory with path [datastore1] vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2358.824643] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Fetch image to [datastore1] vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2358.824809] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2358.825563] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24edfdf-faa5-49ca-a10a-673dbb8bc6b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.832143] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617bc5e0-50bb-4cd9-97ab-11592e602a5a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.840933] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc80334-8cb6-489e-b40a-db690ddfdcdf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.871292] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd55daa5-5a4a-4b33-af9b-67b89795ddba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.876604] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7f0151d5-59f0-4c27-8d1d-988e8fdc5a1a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.896745] env[61964]: DEBUG oslo_vmware.api [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Task: {'id': task-1688722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075451} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2358.898040] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2358.898236] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2358.898404] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2358.898572] env[61964]: INFO nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Took 0.61 seconds to destroy the instance on the hypervisor. 
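Both the failed CopyVirtualDisk_Task in the traceback above and the DeleteDatastoreFile_Task that completes here go through the same wait-for-task path: submit the vCenter task, poll its state from a looping call, return the result on success, or translate the reported fault (here `InvalidArgument` on `fileType`) into a `VimFaultException` in the waiting greenthread. A minimal, library-free sketch of that poll-and-raise shape — the dict-based `get_task_info` callable is a placeholder for illustration, not oslo.vmware's API:

```python
import time

class TaskFailed(Exception):
    """Raised when the remote task ends in an error state."""

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it succeeds or fails: the same loop -> inspect
    task_info -> raise-on-error shape the traceback above walks through."""
    while True:
        info = get_task_info()            # stand-in for a vCenter TaskInfo read
        if info["state"] == "success":    # e.g. DeleteDatastoreFile_Task done
            return info.get("result")
        if info["state"] == "error":
            # oslo.vmware translates the fault (InvalidArgument ->
            # VimFaultException); here it is just wrapped generically.
            raise TaskFailed(f"task failed: {info['error']}")
        time.sleep(poll_interval)         # the real code yields to eventlet

# Example: a task that reports progress twice, then completes.
states = iter([{"state": "running"}, {"state": "running"},
               {"state": "success", "result": "ok"}])
print(wait_for_task(lambda: next(states), poll_interval=0.01))
```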
[ 2358.900307] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2358.902321] env[61964]: DEBUG nova.compute.claims [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2358.902495] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2358.902703] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2358.953126] env[61964]: DEBUG oslo_vmware.rw_handles [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2359.013301] env[61964]: DEBUG oslo_vmware.rw_handles [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2359.013402] env[61964]: DEBUG oslo_vmware.rw_handles [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2359.155921] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcb880a-4809-4b0d-a343-0e09b5f8cdc2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.163296] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304da228-3003-48aa-bf8d-bc76e4dbcfab {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.194496] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42678954-3e7c-4208-8a68-837b936021ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.201552] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c4bb6d-6240-46b8-8c9b-ae70d436b9b9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.214557] env[61964]: DEBUG nova.compute.provider_tree [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2359.222671] env[61964]: DEBUG nova.scheduler.client.report [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2359.238089] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.335s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2359.238847] env[61964]: ERROR nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2359.238847] env[61964]: Faults: ['InvalidArgument'] [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Traceback (most recent call last): [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2359.238847] env[61964]: 
ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self.driver.spawn(context, instance, image_meta, [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self._fetch_image_if_missing(context, vi) [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] image_cache(vi, tmp_image_ds_loc) [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] vm_util.copy_virtual_disk( [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] session._wait_for_task(vmdk_copy_task) [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] return self.wait_for_task(task_ref) [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] return evt.wait() [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] result = hub.switch() [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] return self.greenlet.switch() [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] self.f(*self.args, **self.kw) [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] raise exceptions.translate_fault(task_info.error) [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Faults: ['InvalidArgument'] [ 2359.238847] env[61964]: ERROR nova.compute.manager [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] [ 2359.239829] env[61964]: DEBUG nova.compute.utils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2359.242408] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Build of instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 was re-scheduled: A specified parameter was not correct: fileType [ 2359.242408] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2359.242599] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2359.244160] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2359.244160] env[61964]: DEBUG nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2359.244160] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2359.652401] env[61964]: DEBUG nova.network.neutron [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2359.665680] env[61964]: INFO nova.compute.manager [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Took 0.42 seconds to deallocate network for instance. [ 2359.764641] env[61964]: INFO nova.scheduler.client.report [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Deleted allocations for instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 [ 2359.784062] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07f5bb23-87d6-47fe-9771-bac6ff5c5f16 tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 569.883s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2359.785941] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 373.988s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2359.785941] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Acquiring lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2359.785941] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2359.785941] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2359.787861] env[61964]: INFO nova.compute.manager [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Terminating instance [ 2359.789555] env[61964]: DEBUG nova.compute.manager [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2359.789751] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2359.790592] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-984ee2f0-cbc2-4aa7-b882-99790d2d6fdb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.801882] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3303a8-f88f-4a49-ab3e-9ea0703e063a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.820195] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2359.832365] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96 could not be found. [ 2359.832574] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2359.832749] env[61964]: INFO nova.compute.manager [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Took 0.04 seconds to destroy the instance on the hypervisor. 
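The lockutils entries above record, for each named lock, which callable acquired it, how long the caller waited, and how long the lock was held (569.883s held on the build lock, 373.988s waited by the terminate path, near-zero for the per-instance events lock). A rough sketch of that waited/held accounting around a per-instance lock, assuming oslo.concurrency's `lockutils.lock()` context manager; the surrounding function is illustrative, not Nova's:

```python
import time
from oslo_concurrency import lockutils

def do_terminate(instance_uuid):
    """Illustrative only: the waited/held bookkeeping shown in the log lines;
    the real timing is emitted by lockutils itself around nova.compute code."""
    t0 = time.monotonic()
    with lockutils.lock(instance_uuid):        # internal, in-process lock
        waited = time.monotonic() - t0
        t1 = time.monotonic()
        # ... destroy the instance on the hypervisor, clear its events ...
        held = time.monotonic() - t1
    print(f'Lock "{instance_uuid}": waited {waited:.3f}s, held {held:.3f}s')
```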
[ 2359.832992] env[61964]: DEBUG oslo.service.loopingcall [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2359.833480] env[61964]: DEBUG nova.compute.manager [-] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2359.833591] env[61964]: DEBUG nova.network.neutron [-] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2359.861816] env[61964]: DEBUG nova.network.neutron [-] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2359.868353] env[61964]: INFO nova.compute.manager [-] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] Took 0.03 seconds to deallocate network for instance. [ 2359.873300] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2359.873539] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2359.874950] env[61964]: INFO nova.compute.claims [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2359.964976] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7f560d0f-125b-4e1d-a7fe-e09efa7c638b tempest-ImagesNegativeTestJSON-2024672432 tempest-ImagesNegativeTestJSON-2024672432-project-member] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2359.966022] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 330.363s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2359.966189] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96] During sync_power_state the instance has a pending task (deleting). Skip. 
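The "Waiting for function ... _deallocate_network_with_retries to return" entry is oslo.service's looping-call machinery: the network deallocation is wrapped in a looping call, and the caller blocks on its result until the wrapped function signals it is done. A sketch of that block-until-done pattern with `FixedIntervalLoopingCall`; the `try_once` callable and its return convention are assumptions for illustration, not Nova's actual helper:

```python
from oslo_service import loopingcall

def call_with_retries(try_once, interval=1.0):
    """Run try_once() periodically until it reports success, then hand its
    result back to the caller blocked in timer.wait()."""
    def _poll():
        done, result = try_once()
        if done:
            # LoopingCallDone stops the loop and becomes wait()'s return value.
            raise loopingcall.LoopingCallDone(result)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    timer.start(interval=interval)
    return timer.wait()

# Example: succeed on the third attempt.
attempts = iter([(False, None), (False, None), (True, "deallocated")])
print(call_with_retries(lambda: next(attempts), interval=0.01))
```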
[ 2359.966486] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "5d9f2c9f-2ec7-480c-b2b6-e1c1590b2a96" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2360.070307] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136a4aec-4252-42c3-b503-a46eded2d536 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.078545] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1c64e2-44dd-4574-9407-9ef3a94e67d4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.109697] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade61a0f-bf8a-48b3-80cd-663dd0dd043e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.117217] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f206d7c5-e21b-4196-8bbe-ffd0b0ba4f65 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.132026] env[61964]: DEBUG nova.compute.provider_tree [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2360.141214] env[61964]: DEBUG nova.scheduler.client.report [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2360.155771] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.282s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2360.156303] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2360.192041] env[61964]: DEBUG nova.compute.utils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2360.197202] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Not allocating networking since 'none' was specified. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 2360.204521] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2360.270909] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2360.296663] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2360.296906] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2360.297077] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2360.297265] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2360.297414] env[61964]: DEBUG 
nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2360.297561] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2360.297765] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2360.297919] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2360.298098] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2360.298264] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2360.298434] env[61964]: DEBUG nova.virt.hardware [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2360.299300] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63dc9d5-c357-4c86-bde1-949128a7f96e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.307310] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b06ad1-4455-4bcf-81cf-fcf0d6fce9d4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.320414] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance VIF info [] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2360.325877] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Creating folder: Project 
(8f6751ef2bae429bbf8bb7e40a4dc712). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2360.326143] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb4bf0a1-554b-41b5-bb2f-34fc4c04672c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.337046] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Created folder: Project (8f6751ef2bae429bbf8bb7e40a4dc712) in parent group-v351942. [ 2360.337261] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Creating folder: Instances. Parent ref: group-v352028. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2360.337479] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f9053af-08e9-4aaa-9edd-0ca17ecb0b8f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.345995] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Created folder: Instances in parent group-v352028. [ 2360.346244] env[61964]: DEBUG oslo.service.loopingcall [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2360.346429] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2360.346612] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98d0405f-e0b4-4a77-9943-5603bc686a5d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.361916] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2360.361916] env[61964]: value = "task-1688725" [ 2360.361916] env[61964]: _type = "Task" [ 2360.361916] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.368788] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688725, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.871635] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688725, 'name': CreateVM_Task, 'duration_secs': 0.237308} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2360.871986] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2360.872216] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2360.872376] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2360.872685] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2360.872923] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8ee9d48-7d57-4f20-b00e-2d394c32272c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.877015] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Waiting for the task: (returnval){ [ 2360.877015] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527ef923-8812-b497-205e-48ddd2551066" [ 2360.877015] env[61964]: _type = "Task" [ 2360.877015] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.884033] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527ef923-8812-b497-205e-48ddd2551066, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.383381] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.383572] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2361.387457] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2361.387689] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2361.387898] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2362.383968] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.384825] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2364.380149] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2365.383706] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.383617] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.395943] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2367.395943] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2367.395943] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2367.395943] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2367.398022] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff61e08-dddb-4c78-8148-ec0fac5e2c0c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.405933] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c45742-32b8-4b17-8438-dd8d9bead9c9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.422714] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240495aa-e516-4c55-817d-8e736f4faaca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.429034] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7992a229-23c9-4abb-a99d-56108f69e0a4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.457237] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181308MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2367.457383] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2367.457561] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2367.527674] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 59c25eab-011e-4690-99fe-976f8dbea580 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.527827] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b6c97be0-e146-46b1-8d2e-085818e45835 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.527954] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 63911858-5a79-4479-8c92-46afca980300 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.528089] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.528210] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.528327] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.528440] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.528554] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.528665] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f5589fbe-df43-4407-b63a-5e4f96021b61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.528776] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2367.538905] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2367.548367] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2367.557203] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2367.557409] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2367.557552] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2367.691352] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ed0974-cbf1-4049-976a-a366d3d1d0d1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.698991] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270484fd-2446-4385-b8cc-95bb9b09bac3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.729126] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d84a31-ea57-4e08-b171-0bbd893aee5d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.735506] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09dca94-a6f8-41a3-a0d0-e5676df73962 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.747705] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: 
c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2367.757982] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2367.771351] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2367.771523] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.314s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2371.768698] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.883125] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2371.883409] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2384.176015] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "f5589fbe-df43-4407-b63a-5e4f96021b61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2405.876438] env[61964]: WARNING oslo_vmware.rw_handles [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles 
Traceback (most recent call last): [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2405.876438] env[61964]: ERROR oslo_vmware.rw_handles [ 2405.877059] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2405.878852] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2405.879135] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Copying Virtual Disk [datastore1] vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/ac727265-ccf4-4ca1-b025-951a2a4fccf0/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2405.879435] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5141918-1a2d-47b4-af25-944ad6b5703e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.888728] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Waiting for the task: (returnval){ [ 2405.888728] env[61964]: value = "task-1688726" [ 2405.888728] env[61964]: _type = "Task" [ 2405.888728] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.896207] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Task: {'id': task-1688726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.400118] env[61964]: DEBUG oslo_vmware.exceptions [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2406.400400] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2406.400928] env[61964]: ERROR nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2406.400928] env[61964]: Faults: ['InvalidArgument'] [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Traceback (most recent call last): [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] yield resources [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self.driver.spawn(context, instance, image_meta, [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self._fetch_image_if_missing(context, vi) [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] image_cache(vi, tmp_image_ds_loc) [ 2406.400928] 
env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] vm_util.copy_virtual_disk( [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] session._wait_for_task(vmdk_copy_task) [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] return self.wait_for_task(task_ref) [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] return evt.wait() [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] result = hub.switch() [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] return self.greenlet.switch() [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self.f(*self.args, **self.kw) [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] raise exceptions.translate_fault(task_info.error) [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Faults: ['InvalidArgument'] [ 2406.400928] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] [ 2406.401964] env[61964]: INFO nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Terminating instance [ 2406.402790] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 
tempest-MigrationsAdminTest-329188456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2406.403012] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2406.403243] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1bcbe47-654b-4c32-8aa2-b3eb7b5a2ca3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.405320] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2406.405510] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2406.406222] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bad08c-a8f3-4d4e-b661-e9f90a935e7b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.412924] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2406.413158] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccac3b7c-6570-4daa-a314-4db09be09b3c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.415240] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2406.415409] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2406.416355] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea2d50fc-0a6a-4f31-8f9c-d4d262cf7b92 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.421195] env[61964]: DEBUG oslo_vmware.api [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for the task: (returnval){ [ 2406.421195] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ea19d7-7c62-08cb-7ca0-a0fef28b8f4f" [ 2406.421195] env[61964]: _type = "Task" [ 2406.421195] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.430513] env[61964]: DEBUG oslo_vmware.api [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ea19d7-7c62-08cb-7ca0-a0fef28b8f4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.488198] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2406.488422] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2406.488585] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Deleting the datastore file [datastore1] 59c25eab-011e-4690-99fe-976f8dbea580 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2406.488847] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9b58985-9b09-4047-847a-49c63e00afad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.495183] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Waiting for the task: (returnval){ [ 2406.495183] env[61964]: value = "task-1688728" [ 2406.495183] env[61964]: _type = "Task" [ 2406.495183] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.502481] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Task: {'id': task-1688728, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.931961] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2406.931961] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Creating directory with path [datastore1] vmware_temp/b4988c7b-363d-49d0-805f-0288420b1e83/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2406.932359] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9690f2e-57d2-46ed-b28a-32bed709fadf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.944114] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Created directory with path [datastore1] vmware_temp/b4988c7b-363d-49d0-805f-0288420b1e83/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2406.944114] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Fetch image to [datastore1] vmware_temp/b4988c7b-363d-49d0-805f-0288420b1e83/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2406.944352] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/b4988c7b-363d-49d0-805f-0288420b1e83/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2406.945082] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259828e0-ec55-49d1-992e-ba94c61ab9a3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.952965] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f7bf36-82cc-4cd4-ab9e-5737a7b188a0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.960668] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a027e5a-fe8f-4883-9cfa-6bbce2c9c4ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.991307] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5040e782-9526-424e-b41c-b40b6530e724 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.999959] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-248a8914-d2c6-4bd0-af81-a32894492caa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.006587] env[61964]: DEBUG oslo_vmware.api [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Task: {'id': task-1688728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068633} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2407.006821] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2407.007009] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2407.007192] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2407.007363] env[61964]: INFO nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Took 0.60 seconds to destroy the instance on the hypervisor. 
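The records just above show the driver's usual task lifecycle against vCenter: a method such as FileManager.DeleteDatastoreFile_Task is invoked, the API hands back a task handle (here task-1688728), and the session polls it ("progress is 0%") until it completes and a duration_secs is reported, after which the compute manager logs "Took 0.60 seconds to destroy the instance on the hypervisor." The sketch below mirrors that invoke-then-poll loop in plain Python as an illustration only; it is not oslo.vmware's implementation, and the FakeTask class, the poll interval, and the progress increments are assumptions made for the example.

    import time

    class FakeTask:
        # Stands in for a vCenter task handle such as 'task-1688728'; real code
        # would read the task's info from the server on each poll instead.
        def __init__(self, ticks_until_done=3):
            self._ticks = ticks_until_done
            self.progress = 0

        def poll(self):
            self._ticks -= 1
            self.progress = min(100, self.progress + 40)
            return 'success' if self._ticks <= 0 else 'running'

    def wait_for_task(task, interval=0.5):
        # Poll until the task reports success, mirroring the log's
        # "progress is N%" lines followed by "completed successfully".
        started = time.monotonic()
        while True:
            state = task.poll()
            print(f"progress is {task.progress}%")
            if state == 'success':
                return time.monotonic() - started  # analogous to duration_secs
            time.sleep(interval)

    duration = wait_for_task(FakeTask())
    print(f"DeleteDatastoreFile_Task completed successfully, duration_secs={duration:.6f}")

On the failing path earlier in this section the same loop surfaces the task error instead of a result: polling the CopyVirtualDisk_Task raises the translated VimFaultException ("A specified parameter was not correct: fileType"), which is what aborts the spawn of instance 59c25eab-011e-4690-99fe-976f8dbea580 and leads to the destroy sequence shown above.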
[ 2407.009465] env[61964]: DEBUG nova.compute.claims [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2407.009637] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2407.009868] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2407.019982] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2407.149778] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2407.150789] env[61964]: ERROR nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = getattr(controller, method)(*args, **kwargs) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._get(image_id) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] resp, body = self.http_client.get(url, headers=header) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.request(url, 'GET', **kwargs) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._handle_response(resp) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exc.from_response(resp, resp.content) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During handling of the above exception, another exception occurred: [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] yield resources [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self.driver.spawn(context, instance, image_meta, [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._fetch_image_if_missing(context, vi) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image_fetch(context, vi, tmp_image_ds_loc) [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] images.fetch_image( [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2407.150789] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] metadata = IMAGE_API.get(context, image_ref) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return session.show(context, image_id, [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] _reraise_translated_image_exception(image_id) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File 
"/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise new_exc.with_traceback(exc_trace) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = getattr(controller, method)(*args, **kwargs) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._get(image_id) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] resp, body = self.http_client.get(url, headers=header) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.request(url, 'GET', **kwargs) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._handle_response(resp) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exc.from_response(resp, resp.content) [ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2407.151967] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2407.151967] env[61964]: INFO nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Terminating instance [ 2407.152725] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2407.152932] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2407.153587] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2407.153770] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2407.153998] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09055b6b-1778-4ffd-bab2-da57a16c999f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.156737] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50740e73-788d-4f6e-9626-bcdc42a4ac8c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.166494] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2407.166785] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bb5a4b0-473f-458b-855c-139d3d54383f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.169168] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2407.169343] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2407.170327] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c2a788d-63f5-4114-9bde-6df41354e6b7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.177747] env[61964]: DEBUG oslo_vmware.api [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for the task: (returnval){ [ 2407.177747] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52cc878b-e901-9452-76f2-d4e7b50ac257" [ 2407.177747] env[61964]: _type = "Task" [ 2407.177747] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2407.186694] env[61964]: DEBUG oslo_vmware.api [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52cc878b-e901-9452-76f2-d4e7b50ac257, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2407.244839] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2407.245074] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2407.245258] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Deleting the datastore file [datastore1] b6c97be0-e146-46b1-8d2e-085818e45835 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2407.245528] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a9a0a96-17b6-4108-ba46-74303c9cbecc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.248813] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16b001b-86db-4f84-8f20-bd0f435632a1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.256776] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85e8166-d3a0-4dc9-94e1-5ff2c7bd5aeb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.259847] env[61964]: DEBUG oslo_vmware.api [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for the task: (returnval){ [ 2407.259847] env[61964]: value = 
"task-1688730" [ 2407.259847] env[61964]: _type = "Task" [ 2407.259847] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2407.287765] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2505efa-9328-48a9-bd4b-3f3ce5399366 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.293311] env[61964]: DEBUG oslo_vmware.api [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': task-1688730, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2407.298197] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd04e107-a5b6-4071-84ad-75deb445aa28 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.311473] env[61964]: DEBUG nova.compute.provider_tree [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2407.321181] env[61964]: DEBUG nova.scheduler.client.report [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2407.343811] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.334s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2407.344399] env[61964]: ERROR nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2407.344399] env[61964]: Faults: ['InvalidArgument'] [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Traceback (most recent call last): [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2407.344399] env[61964]: ERROR 
nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self.driver.spawn(context, instance, image_meta, [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self._fetch_image_if_missing(context, vi) [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] image_cache(vi, tmp_image_ds_loc) [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] vm_util.copy_virtual_disk( [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] session._wait_for_task(vmdk_copy_task) [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] return self.wait_for_task(task_ref) [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] return evt.wait() [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] result = hub.switch() [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] return self.greenlet.switch() [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] self.f(*self.args, **self.kw) [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] raise exceptions.translate_fault(task_info.error) [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Faults: ['InvalidArgument'] [ 2407.344399] env[61964]: ERROR nova.compute.manager [instance: 59c25eab-011e-4690-99fe-976f8dbea580] [ 2407.348688] env[61964]: DEBUG nova.compute.utils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2407.348688] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Build of instance 59c25eab-011e-4690-99fe-976f8dbea580 was re-scheduled: A specified parameter was not correct: fileType [ 2407.348688] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2407.348688] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2407.348688] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2407.348688] env[61964]: DEBUG nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2407.348688] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2407.691468] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2407.692064] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Creating directory with path [datastore1] vmware_temp/6729cd7d-d793-4c70-b089-3abf8cbbe2f6/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2407.692492] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4cdee5d-c87a-424b-8e08-e29b0ed98437 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.704841] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Created directory with path [datastore1] vmware_temp/6729cd7d-d793-4c70-b089-3abf8cbbe2f6/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2407.707018] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Fetch image to [datastore1] vmware_temp/6729cd7d-d793-4c70-b089-3abf8cbbe2f6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2407.707018] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/6729cd7d-d793-4c70-b089-3abf8cbbe2f6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2407.707018] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bf2613-9bfe-448c-9a86-b41aae2666f9 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.713946] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a96534-8f2f-46e9-9c01-12e91bf1bf03 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.723692] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9d9d5b-941d-441a-a988-6f28c210ab8a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.758463] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81229d2c-d9cd-4599-9726-64598979cb33 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.771325] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d4615913-f3a4-4314-9f4d-76fc45354a81 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.773380] env[61964]: DEBUG oslo_vmware.api [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Task: {'id': task-1688730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078574} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2407.773765] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2407.774029] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2407.774267] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2407.774494] env[61964]: INFO nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Took 0.62 seconds to destroy the instance on the hypervisor. 
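Annotation: the sequence just above — FileManager.DeleteDatastoreFile_Task, the "Waiting for the task ... to complete" block, the 0% _poll_task probe and the final "completed successfully" record for task-1688730 — is the standard oslo.vmware task round-trip. The sketch below expresses that round-trip against the public oslo.vmware API; "session" is assumed to be an already-authenticated oslo_vmware.api.VMwareAPISession, and file_path / datacenter_ref are illustrative stand-ins, not values taken from this log.

    from oslo_vmware import exceptions as vexc


    def delete_datastore_file(session, file_path, datacenter_ref):
        """Issue DeleteDatastoreFile_Task and block until vCenter finishes it."""
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=file_path,
                                  datacenter=datacenter_ref)
        try:
            # wait_for_task drives the "_poll_task ... progress is 0%" loop seen
            # in the log and raises once the task ends in an error state.
            session.wait_for_task(task)
        except vexc.FileNotFoundException:
            # Treating an already-missing file as success is a choice made for
            # this sketch, not necessarily what the caller in the log does.
            pass

The same wait_for_task loop is what surfaces the InvalidArgument / fileType fault for the CopyVirtualDisk_Task in the traceback above.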
[ 2407.780993] env[61964]: DEBUG nova.compute.claims [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2407.781221] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2407.781448] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2407.800361] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2407.924469] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2407.925290] env[61964]: ERROR nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
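Annotation: the trio of lockutils records above (Acquiring lock "compute_resources" by "...abort_instance_claim", acquired ... waited 0.000s, and later "released" ... held) is produced by oslo.concurrency's synchronized decorator wrapping the resource-tracker claim paths. A minimal stand-in is sketched below; MiniResourceTracker and its methods are invented for illustration and are not the real nova.compute.resource_tracker code — only the locking pattern is the point.

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'


    class MiniResourceTracker(object):
        """Toy tracker showing claim and abort serialized on one named lock."""

        def __init__(self):
            self.claims = {}

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, instance_uuid, requested):
            # Logged as: Lock "compute_resources" acquired by "...instance_claim"
            self.claims[instance_uuid] = requested
            return requested

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def abort_instance_claim(self, instance_uuid):
            # Logged as: Lock "compute_resources" acquired by
            # "...abort_instance_claim", with the waited/held timings above.
            self.claims.pop(instance_uuid, None)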
[ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = getattr(controller, method)(*args, **kwargs) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._get(image_id) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] resp, body = self.http_client.get(url, headers=header) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.request(url, 'GET', **kwargs) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._handle_response(resp) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exc.from_response(resp, resp.content) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] During handling of the above exception, another exception occurred: [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] yield resources [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self.driver.spawn(context, instance, image_meta, [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._fetch_image_if_missing(context, vi) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image_fetch(context, vi, tmp_image_ds_loc) [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] images.fetch_image( [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2407.925290] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] metadata = IMAGE_API.get(context, image_ref) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return session.show(context, image_id, [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] _reraise_translated_image_exception(image_id) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File 
"/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise new_exc.with_traceback(exc_trace) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = getattr(controller, method)(*args, **kwargs) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._get(image_id) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] resp, body = self.http_client.get(url, headers=header) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.request(url, 'GET', **kwargs) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._handle_response(resp) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exc.from_response(resp, resp.content) [ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2407.926999] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2407.926999] env[61964]: INFO nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Terminating instance [ 2407.928385] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2407.928385] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2407.930365] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2407.930550] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2407.930808] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8d7b89d-f218-40eb-a630-d943c5637543 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.933958] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d6d140-e4f7-4f47-a775-afdf24883757 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.942192] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2407.943432] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c2effce-f2fb-47e8-8303-c9db48da98bc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.945113] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2407.945308] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 
tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2407.946032] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf3b1dab-590a-424c-8ceb-ebd7a90e08ce {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.953240] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Waiting for the task: (returnval){ [ 2407.953240] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523c8f07-04cf-d015-abb6-8be6b196f99d" [ 2407.953240] env[61964]: _type = "Task" [ 2407.953240] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2407.965076] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523c8f07-04cf-d015-abb6-8be6b196f99d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2408.014557] env[61964]: DEBUG nova.network.neutron [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2408.027058] env[61964]: INFO nova.compute.manager [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Took 0.68 seconds to deallocate network for instance. 
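Annotation: the MakeDirectory / "Created directory" / "Folder [datastore1] devstack-image-cache_base created." records above come from the image-cache preparation step. A hedged sketch of that step follows; as before, "session" is assumed to be an authenticated VMwareAPISession, and tolerating FileAlreadyExistsException is what lets the several concurrent tempest workers in this log race to create the same cache folder safely.

    from oslo_vmware import exceptions as vexc


    def create_folder_if_missing(session, datacenter_ref, ds_name, folder):
        """Create '[<ds_name>] <folder>' on the datastore, ignoring 'already exists'."""
        path = '[%s] %s' % (ds_name, folder)  # e.g. "[datastore1] devstack-image-cache_base"
        file_manager = session.vim.service_content.fileManager
        try:
            # MakeDirectory is a plain vim call, not a *_Task, which is why no
            # wait_for_task step appears for it in the log.
            session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                               name=path, datacenter=datacenter_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            # Another worker created the cache folder first; nothing to do.
            pass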
[ 2408.092879] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bfbdc3-6d7b-4022-905f-fb1529a42660 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.101536] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950be9a0-bb7c-4504-9fb2-eb1ea4cebb84 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.133810] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a599c97-d07e-44db-9a28-d9dd64a2373b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.143974] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896b6579-89e8-477f-a281-13183ea9a61b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.164641] env[61964]: DEBUG nova.compute.provider_tree [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2408.168205] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2408.168572] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2408.168742] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Deleting the datastore file [datastore1] 63911858-5a79-4479-8c92-46afca980300 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2408.169366] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6dafc123-8c95-4135-9c71-0e1136628739 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.172225] env[61964]: INFO nova.scheduler.client.report [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Deleted allocations for instance 59c25eab-011e-4690-99fe-976f8dbea580 [ 2408.180018] env[61964]: DEBUG nova.scheduler.client.report [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2408.185837] env[61964]: DEBUG oslo_vmware.api [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for the task: (returnval){ [ 2408.185837] env[61964]: value = "task-1688732" [ 2408.185837] env[61964]: _type = "Task" [ 2408.185837] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2408.194815] env[61964]: DEBUG oslo_vmware.api [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': task-1688732, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2408.200090] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0e993730-8819-41b1-a6c8-62d52a6f960f tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "59c25eab-011e-4690-99fe-976f8dbea580" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.067s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.201252] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "59c25eab-011e-4690-99fe-976f8dbea580" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 381.910s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.201483] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Acquiring lock "59c25eab-011e-4690-99fe-976f8dbea580-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2408.201711] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "59c25eab-011e-4690-99fe-976f8dbea580-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.202354] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "59c25eab-011e-4690-99fe-976f8dbea580-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.204270] env[61964]: INFO nova.compute.manager [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Terminating instance [ 2408.206185] env[61964]: DEBUG nova.compute.manager [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2408.206380] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2408.207425] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a1c4c0e-db47-4092-a2dd-ace52efc9b6d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.210604] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.429s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.211327] env[61964]: ERROR nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = getattr(controller, method)(*args, **kwargs) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._get(image_id) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] resp, body = self.http_client.get(url, headers=header) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.request(url, 'GET', **kwargs) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._handle_response(resp) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exc.from_response(resp, resp.content) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During handling of the above exception, another exception occurred: [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self.driver.spawn(context, instance, image_meta, [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._fetch_image_if_missing(context, vi) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image_fetch(context, vi, tmp_image_ds_loc) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] images.fetch_image( [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] metadata = IMAGE_API.get(context, image_ref) [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2408.211327] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return session.show(context, image_id, [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] _reraise_translated_image_exception(image_id) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise new_exc.with_traceback(exc_trace) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: 
b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = getattr(controller, method)(*args, **kwargs) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._get(image_id) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] resp, body = self.http_client.get(url, headers=header) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.request(url, 'GET', **kwargs) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._handle_response(resp) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exc.from_response(resp, resp.content) [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2408.212788] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.212788] env[61964]: DEBUG nova.compute.utils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
{{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2408.214243] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Build of instance b6c97be0-e146-46b1-8d2e-085818e45835 was re-scheduled: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2408.214440] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2408.214713] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2408.214883] env[61964]: DEBUG nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2408.214984] env[61964]: DEBUG nova.network.neutron [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2408.217271] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2408.226085] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4359a7d-0544-459c-8a1a-ca2b19751db9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.255481] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59c25eab-011e-4690-99fe-976f8dbea580 could not be found. 
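Annotation: both ImageNotAuthorized tracebacks above bottom out in the same pattern — glanceclient raises HTTPUnauthorized on the image GET, and nova/image/glance.py re-raises it through _reraise_translated_image_exception as nova.exception.ImageNotAuthorized with the original traceback attached. The sketch below shows only that translation step; the isinstance mapping is a partial illustration, not the full table in glance.py.

    import sys

    import glanceclient.exc
    from nova import exception


    def reraise_translated_image_exception(image_id):
        """Re-raise the in-flight glanceclient error as a nova image exception."""
        _exc_type, exc_value, exc_trace = sys.exc_info()
        if isinstance(exc_value, (glanceclient.exc.HTTPForbidden,
                                  glanceclient.exc.HTTPUnauthorized)):
            new_exc = exception.ImageNotAuthorized(image_id=image_id)
            # Mirrors the "raise new_exc.with_traceback(exc_trace)" frame
            # visible in the tracebacks above.
            raise new_exc.with_traceback(exc_trace)
        raise exc_value.with_traceback(exc_trace)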
[ 2408.255716] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2408.255902] env[61964]: INFO nova.compute.manager [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2408.256182] env[61964]: DEBUG oslo.service.loopingcall [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2408.256414] env[61964]: DEBUG nova.compute.manager [-] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2408.256513] env[61964]: DEBUG nova.network.neutron [-] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2408.284766] env[61964]: DEBUG nova.network.neutron [-] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2408.289650] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2408.289910] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.291912] env[61964]: INFO nova.compute.claims [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2408.295480] env[61964]: INFO nova.compute.manager [-] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] Took 0.04 seconds to deallocate network for instance. 
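Annotation: "Claim successful on node domain-c8...." is decided against the same inventory the report client keeps logging as unchanged for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5. The snippet below replays those exact numbers; reading capacity as (total - reserved) * allocation_ratio is the usual placement convention and is an assumption of this note, not something the log spells out.

    # Inventory exactly as logged for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # max_unit still caps any single allocation (e.g. one server may not ask
        # for more than 16 VCPUs or 155 GB of disk on this node).
        print('%s: schedulable=%s, per-allocation cap=%s'
              % (rc, capacity, inv['max_unit']))

With these numbers the node advertises 192 schedulable VCPUs (48 * 4.0), 196078 MB of RAM (196590 - 512) and 400 GB of disk, which is why a single small claim succeeds immediately here.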
[ 2408.362060] env[61964]: DEBUG neutronclient.v2_0.client [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61964) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2408.363313] env[61964]: ERROR nova.compute.manager [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = getattr(controller, method)(*args, **kwargs) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._get(image_id) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] resp, body = self.http_client.get(url, headers=header) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.request(url, 'GET', **kwargs) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._handle_response(resp) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2408.363313] 
env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exc.from_response(resp, resp.content) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During handling of the above exception, another exception occurred: [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self.driver.spawn(context, instance, image_meta, [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._fetch_image_if_missing(context, vi) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image_fetch(context, vi, tmp_image_ds_loc) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] images.fetch_image( [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] metadata = IMAGE_API.get(context, image_ref) [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2408.363313] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return session.show(context, image_id, [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2408.364379] env[61964]: ERROR nova.compute.manager 
[instance: b6c97be0-e146-46b1-8d2e-085818e45835] _reraise_translated_image_exception(image_id) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise new_exc.with_traceback(exc_trace) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = getattr(controller, method)(*args, **kwargs) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._get(image_id) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] resp, body = self.http_client.get(url, headers=header) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.request(url, 'GET', **kwargs) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self._handle_response(resp) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exc.from_response(resp, resp.content) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During handling of the above exception, another exception occurred: [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._build_and_run_instance(context, instance, image, [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exception.RescheduledException( [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] nova.exception.RescheduledException: Build of instance b6c97be0-e146-46b1-8d2e-085818e45835 was re-scheduled: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During handling of the above exception, another exception occurred: [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] exception_handler_v20(status_code, error_body) [ 2408.364379] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise client_exc(message=error_message, [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Neutron server returns request_ids: ['req-cdb9e97a-3bd6-4157-a6d4-be8e337f468a'] [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: 
b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During handling of the above exception, another exception occurred: [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._deallocate_network(context, instance, requested_networks) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self.network_api.deallocate_for_instance( [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] data = neutron.list_ports(**search_opts) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.list('ports', self.ports_path, retrieve_all, [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] for r in self._pagination(collection, path, **params): [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] res = self.get(path, params=params) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: 
b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.retry_request("GET", action, body=body, [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.do_request(method, action, body=body, [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._handle_fault_response(status_code, replybody, resp) [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exception.Unauthorized() [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] nova.exception.Unauthorized: Not authorized. [ 2408.365675] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.395828] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2d75dd8e-d65f-4e7f-845b-ad9a87705d14 tempest-InstanceActionsNegativeTestJSON-1619894296 tempest-InstanceActionsNegativeTestJSON-1619894296-project-member] Lock "59c25eab-011e-4690-99fe-976f8dbea580" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.396661] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "59c25eab-011e-4690-99fe-976f8dbea580" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 378.793s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.396801] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 59c25eab-011e-4690-99fe-976f8dbea580] During sync_power_state the instance has a pending task (deleting). Skip. 
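The chained traceback above shows the same pattern twice: a 401 from a service client (glanceclient, then neutronclient) that Nova translates into its own exception (ImageNotAuthorized, then Unauthorized) while building and then cleaning up instance b6c97be0-e146-46b1-8d2e-085818e45835. A minimal sketch of reproducing the failed Glance "show" outside Nova follows; only the image UUID comes from this log, every auth value below is a placeholder to be replaced with the deployment's real Keystone credentials.

    # Sketch only: re-issue the image "show" that raised ImageNotAuthorized above.
    # All auth values are placeholders; only the image UUID is taken from the traceback.
    from keystoneauth1.identity import v3
    from keystoneauth1 import session
    import glanceclient

    auth = v3.Password(
        auth_url='http://controller:5000/v3',            # placeholder
        username='nova', password='SECRET',               # placeholder service credentials
        project_name='service',
        user_domain_name='Default', project_domain_name='Default')
    sess = session.Session(auth=auth)
    glance = glanceclient.Client('2', session=sess)

    # A 401 here reproduces the failure with these credentials; a normal response
    # suggests the stored credentials are fine and the token Nova forwarded had expired.
    print(glance.images.get('d9802c76-d112-4072-8a46-ca03ed36e004'))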
[ 2408.397030] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "59c25eab-011e-4690-99fe-976f8dbea580" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.422821] env[61964]: INFO nova.scheduler.client.report [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Deleted allocations for instance b6c97be0-e146-46b1-8d2e-085818e45835 [ 2408.446405] env[61964]: DEBUG oslo_concurrency.lockutils [None req-89c20b15-d16e-4b48-99c0-30fd501ec495 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "b6c97be0-e146-46b1-8d2e-085818e45835" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 554.219s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.448125] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "b6c97be0-e146-46b1-8d2e-085818e45835" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 378.844s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.448342] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During sync_power_state the instance has a pending task (spawning). Skip. [ 2408.449086] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "b6c97be0-e146-46b1-8d2e-085818e45835" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.449220] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "b6c97be0-e146-46b1-8d2e-085818e45835" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 357.614s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.449414] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Acquiring lock "b6c97be0-e146-46b1-8d2e-085818e45835-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2408.449640] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "b6c97be0-e146-46b1-8d2e-085818e45835-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 
2408.449830] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "b6c97be0-e146-46b1-8d2e-085818e45835-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.451850] env[61964]: INFO nova.compute.manager [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Terminating instance [ 2408.453757] env[61964]: DEBUG nova.compute.manager [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2408.453986] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2408.454341] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-876d7761-2521-4421-84bf-9cf566cec075 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.465144] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2408.473048] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2408.473375] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Creating directory with path [datastore1] vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2408.477145] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1183fa46-16af-4533-8788-a4d2be9416af {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.492081] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7ada8a7-6a12-43ec-a0d3-b6971398eb1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.512927] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6c97be0-e146-46b1-8d2e-085818e45835 could not be found. [ 2408.513185] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2408.513374] env[61964]: INFO nova.compute.manager [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2408.513689] env[61964]: DEBUG oslo.service.loopingcall [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2408.517735] env[61964]: DEBUG nova.compute.manager [-] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2408.517867] env[61964]: DEBUG nova.network.neutron [-] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2408.519772] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Created directory with path [datastore1] vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2408.519998] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Fetch image to [datastore1] vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2408.520216] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2408.521620] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb1d920-1776-4ac2-94ba-c9d61e57465e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.529326] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eeb00a-4eed-4cc5-a858-a24c8aceae19 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.551190] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e732964-717e-439a-adec-7bc0b5f46400 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.555668] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2408.591007] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b93826-b796-4ab5-a8d1-3bba261f3347 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.597859] env[61964]: DEBUG 
oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2170e40d-6843-4530-aafa-5c02adec1f94 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.605554] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b05af0-4823-4d57-bafa-7453c81ce832 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.611891] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a0d39d-5c96-44ef-b22b-6322abb00313 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.643860] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c58418d-32f7-4d92-88b1-0b34a172a99f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.646798] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2408.654382] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83de367f-c23b-439c-95f2-b5f87c20a2d6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.660896] env[61964]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61964) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2408.660896] env[61964]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2408.661070] env[61964]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-85d6d9e5-d727-4d44-a3b1-770f44ce4989'] [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2408.661070] env[61964]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2408.661070] env[61964]: ERROR oslo.service.loopingcall [ 2408.664174] env[61964]: ERROR nova.compute.manager [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
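The looping-call failure above states the likely cause directly: "Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf". A hedged sketch for checking those [neutron] credentials independently of Nova follows; option names assume the usual keystoneauth password-plugin layout and may differ in a given deployment.

    # Sketch only: read the [neutron] auth options from nova.conf and repeat the
    # port listing that raised NeutronAdminCredentialConfigurationInvalid above.
    # nova.conf is oslo.config (INI-like); configparser is enough for a spot check.
    import configparser
    from keystoneauth1.identity import v3
    from keystoneauth1 import session

    cfg = configparser.ConfigParser()
    cfg.read('/etc/nova/nova.conf')
    n = cfg['neutron']

    auth = v3.Password(
        auth_url=n['auth_url'],
        username=n['username'],
        password=n['password'],
        project_name=n.get('project_name', 'service'),
        user_domain_name=n.get('user_domain_name', 'Default'),
        project_domain_name=n.get('project_domain_name', 'Default'))
    sess = session.Session(auth=auth)

    # A 401 here reproduces the log; a 200 means the stored credentials are valid.
    resp = sess.get('/v2.0/ports', endpoint_filter={'service_type': 'network'})
    print(resp.status_code)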
[ 2408.673928] env[61964]: DEBUG nova.compute.provider_tree [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2408.682577] env[61964]: DEBUG nova.scheduler.client.report [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2408.698384] env[61964]: DEBUG oslo_vmware.api [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Task: {'id': task-1688732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089061} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2408.698454] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2408.702020] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2408.702020] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2408.702020] env[61964]: INFO nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Took 0.77 seconds to destroy the instance on the hypervisor. 
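For reference, the unchanged inventory reported above for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 implies the following schedulable capacity, using the usual placement formula (total - reserved) * allocation_ratio per resource class; the numbers below are copied from that log record.

    # Capacity implied by the inventory record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0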
[ 2408.702020] env[61964]: DEBUG nova.compute.claims [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2408.702020] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2408.703397] env[61964]: ERROR nova.compute.manager [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] exception_handler_v20(status_code, error_body) [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise client_exc(message=error_message, [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Neutron server returns request_ids: ['req-85d6d9e5-d727-4d44-a3b1-770f44ce4989'] [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] During handling of the above exception, another exception occurred: [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Traceback (most recent call last): [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: 
b6c97be0-e146-46b1-8d2e-085818e45835] self._delete_instance(context, instance, bdms) [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._shutdown_instance(context, instance, bdms) [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._try_deallocate_network(context, instance, requested_networks) [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] with excutils.save_and_reraise_exception(): [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self.force_reraise() [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise self.value [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] _deallocate_network_with_retries() [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return evt.wait() [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = hub.switch() [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.greenlet.switch() [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = func(*self.args, **self.kw) [ 2408.703397] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] result = f(*args, **kwargs) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._deallocate_network( [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self.network_api.deallocate_for_instance( [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] data = neutron.list_ports(**search_opts) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.list('ports', self.ports_path, retrieve_all, [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] for r in self._pagination(collection, path, **params): [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] res = self.get(path, params=params) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return 
self.retry_request("GET", action, body=body, [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] return self.do_request(method, action, body=body, [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] ret = obj(*args, **kwargs) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] self._handle_fault_response(status_code, replybody, resp) [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2408.708185] env[61964]: ERROR nova.compute.manager [instance: b6c97be0-e146-46b1-8d2e-085818e45835] [ 2408.708185] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.416s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.708185] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2408.710811] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.155s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2408.712193] env[61964]: INFO nova.compute.claims [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2408.728878] env[61964]: DEBUG oslo_vmware.rw_handles [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2408.785292] env[61964]: DEBUG nova.compute.utils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2408.787516] env[61964]: DEBUG oslo_concurrency.lockutils [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Lock "b6c97be0-e146-46b1-8d2e-085818e45835" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.338s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2408.791887] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2408.792077] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2408.794349] env[61964]: DEBUG oslo_vmware.rw_handles [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Completed reading data from the image iterator. 
{{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2408.794539] env[61964]: DEBUG oslo_vmware.rw_handles [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2408.795975] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2408.847396] env[61964]: INFO nova.compute.manager [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] [instance: b6c97be0-e146-46b1-8d2e-085818e45835] Successfully reverted task state from None on failure for instance. [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server [None req-ef66b2b8-8f04-4de5-8325-ce18130bb626 tempest-MigrationsAdminTest-329188456 tempest-MigrationsAdminTest-329188456-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-85d6d9e5-d727-4d44-a3b1-770f44ce4989'] [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 
2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, 
in decorated_function [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 2408.853900] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server result = 
hub.switch() [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2408.855635] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2408.857013] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2408.857013] env[61964]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2408.857013] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2408.857013] env[61964]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2408.857013] env[61964]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2408.857013] env[61964]: ERROR oslo_messaging.rpc.server [ 2408.865251] env[61964]: DEBUG nova.policy [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c586df6784a4660ab9602e225cceece', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a3f71b0c37437a98325ca4a2f48d46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2408.876685] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2408.903721] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2408.903982] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2408.904155] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2408.904336] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2408.904481] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2408.904626] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2408.904833] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2408.904989] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2408.905170] 
env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2408.905330] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2408.905498] env[61964]: DEBUG nova.virt.hardware [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2408.906420] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f4f263-1a29-4c1b-ae6f-05f85f62bc60 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.915335] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8056634a-827a-488f-bff3-3ba8d133da2a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.004101] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb5d369-c2ea-470d-a79f-00ba9fbc96d5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.011100] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919098d2-699e-433a-b9f7-99b14f8d4e0c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.042641] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252989fe-6762-4fbb-a40e-0aa85bc786d5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.049828] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77a0a18-a967-4531-9d1a-989751839623 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.062994] env[61964]: DEBUG nova.compute.provider_tree [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2409.072094] env[61964]: DEBUG nova.scheduler.client.report [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2409.088009] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.377s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2409.088525] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2409.090896] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.389s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2409.126193] env[61964]: DEBUG nova.compute.utils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2409.127499] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2409.129384] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2409.143722] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2409.221410] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2409.245262] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2409.245535] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2409.245684] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2409.245871] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2409.246042] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2409.246201] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2409.246407] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2409.246582] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2409.246948] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 
tempest-ImagesTestJSON-1538433138-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2409.246948] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2409.247206] env[61964]: DEBUG nova.virt.hardware [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2409.248088] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d89ab5f-9fab-44ca-8729-53aac89dcb76 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.259746] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db622dce-f0b6-48b9-ae8c-1eab0332907d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.281028] env[61964]: DEBUG nova.policy [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd51a99e513144cbac3e397d7ec1dc45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40991aebb97545db8c04deda187dcdfd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2409.299807] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Successfully created port: f983c619-418f-463d-8330-078d12c258a4 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2409.332431] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790a3bcb-bd68-454a-b44d-6b3614f0ef00 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.340164] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af91236-e73c-4014-9e3d-a71ff88b3651 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.374019] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02893d0-70f9-419c-bdaf-5c7c931bd032 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.380774] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb66cf8c-dbf0-4e85-a61e-1fd370612848 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.394174] env[61964]: DEBUG nova.compute.provider_tree [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2409.403653] env[61964]: DEBUG nova.scheduler.client.report [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2409.419943] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.329s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2409.420696] env[61964]: ERROR nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = getattr(controller, method)(*args, **kwargs) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._get(image_id) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] resp, body = self.http_client.get(url, headers=header) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.request(url, 'GET', **kwargs) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._handle_response(resp) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exc.from_response(resp, resp.content) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] During handling of the above exception, another exception occurred: [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self.driver.spawn(context, instance, image_meta, [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._fetch_image_if_missing(context, vi) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image_fetch(context, vi, tmp_image_ds_loc) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] images.fetch_image( [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] metadata = IMAGE_API.get(context, image_ref) [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2409.420696] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return session.show(context, image_id, [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] _reraise_translated_image_exception(image_id) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise new_exc.with_traceback(exc_trace) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 
63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = getattr(controller, method)(*args, **kwargs) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._get(image_id) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] resp, body = self.http_client.get(url, headers=header) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.request(url, 'GET', **kwargs) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._handle_response(resp) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exc.from_response(resp, resp.content) [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2409.422425] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.422425] env[61964]: DEBUG nova.compute.utils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
{{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2409.423485] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Build of instance 63911858-5a79-4479-8c92-46afca980300 was re-scheduled: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2409.423986] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2409.427885] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2409.427885] env[61964]: DEBUG nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2409.427885] env[61964]: DEBUG nova.network.neutron [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2409.589714] env[61964]: DEBUG neutronclient.v2_0.client [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61964) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2409.591087] env[61964]: ERROR nova.compute.manager [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = getattr(controller, method)(*args, **kwargs) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._get(image_id) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] resp, body = self.http_client.get(url, headers=header) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.request(url, 'GET', **kwargs) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._handle_response(resp) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exc.from_response(resp, resp.content) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] During handling of the above exception, another exception occurred: [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self.driver.spawn(context, instance, image_meta, [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._fetch_image_if_missing(context, vi) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image_fetch(context, vi, tmp_image_ds_loc) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] images.fetch_image( [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] metadata = IMAGE_API.get(context, image_ref) [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 2409.591087] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return session.show(context, image_id, [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] _reraise_translated_image_exception(image_id) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise new_exc.with_traceback(exc_trace) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 
63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = getattr(controller, method)(*args, **kwargs) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._get(image_id) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] resp, body = self.http_client.get(url, headers=header) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.request(url, 'GET', **kwargs) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self._handle_response(resp) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exc.from_response(resp, resp.content) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] nova.exception.ImageNotAuthorized: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. 
[ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] During handling of the above exception, another exception occurred: [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._build_and_run_instance(context, instance, image, [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exception.RescheduledException( [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] nova.exception.RescheduledException: Build of instance 63911858-5a79-4479-8c92-46afca980300 was re-scheduled: Not authorized for image d9802c76-d112-4072-8a46-ca03ed36e004. [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] During handling of the above exception, another exception occurred: [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] exception_handler_v20(status_code, error_body) [ 2409.592011] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise client_exc(message=error_message, [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Neutron server returns request_ids: ['req-69197e34-6238-40db-8ae4-6720afdf499f'] [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 
63911858-5a79-4479-8c92-46afca980300] [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] During handling of the above exception, another exception occurred: [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._deallocate_network(context, instance, requested_networks) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self.network_api.deallocate_for_instance( [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] data = neutron.list_ports(**search_opts) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.list('ports', self.ports_path, retrieve_all, [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] for r in self._pagination(collection, path, **params): [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] res = self.get(path, params=params) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 
63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.retry_request("GET", action, body=body, [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.do_request(method, action, body=body, [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._handle_fault_response(status_code, replybody, resp) [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exception.Unauthorized() [ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] nova.exception.Unauthorized: Not authorized. 
[ 2409.592981] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2409.652022] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Successfully created port: 5b29ac57-1a6f-4482-80e7-78c554da8a25 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2409.672710] env[61964]: INFO nova.scheduler.client.report [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Deleted allocations for instance 63911858-5a79-4479-8c92-46afca980300 [ 2409.700668] env[61964]: DEBUG oslo_concurrency.lockutils [None req-edf21044-6d15-497b-93a0-5d98962113ec tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "63911858-5a79-4479-8c92-46afca980300" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 533.126s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2409.701912] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "63911858-5a79-4479-8c92-46afca980300" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 336.499s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2409.702155] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock "63911858-5a79-4479-8c92-46afca980300-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2409.702377] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "63911858-5a79-4479-8c92-46afca980300-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2409.706053] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "63911858-5a79-4479-8c92-46afca980300-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2409.708367] env[61964]: INFO nova.compute.manager [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Terminating instance [ 2409.710650] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquiring lock 
"refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2409.710650] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Acquired lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2409.710773] env[61964]: DEBUG nova.network.neutron [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2409.715707] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2409.784292] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2409.784547] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2409.786330] env[61964]: INFO nova.compute.claims [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2410.043520] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311d7cc4-1ae6-417b-81cb-f5e4cde7c325 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.053638] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042f2df3-8e8e-4d19-8745-996534ccf6ab {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.086186] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2b6409-f4f8-45ad-ae9e-8915dac08eec {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.094046] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f88d63-d456-4f99-8fc0-58628cdb9d4b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.107705] env[61964]: DEBUG 
nova.compute.provider_tree [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2410.116749] env[61964]: DEBUG nova.scheduler.client.report [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2410.137831] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.353s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2410.138391] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2410.182315] env[61964]: DEBUG nova.compute.utils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2410.183593] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2410.183773] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2410.192294] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2410.250856] env[61964]: DEBUG nova.compute.manager [req-9ba800c0-ec14-4bdd-90ed-6df074e20f58 req-50cfe3ed-2397-4e4c-b4bc-5a5c948ea118 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Received event network-vif-plugged-f983c619-418f-463d-8330-078d12c258a4 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2410.251208] env[61964]: DEBUG oslo_concurrency.lockutils [req-9ba800c0-ec14-4bdd-90ed-6df074e20f58 req-50cfe3ed-2397-4e4c-b4bc-5a5c948ea118 service nova] Acquiring lock "031156ba-251e-4b8b-86bd-9c967adc808f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2410.251463] env[61964]: DEBUG oslo_concurrency.lockutils [req-9ba800c0-ec14-4bdd-90ed-6df074e20f58 req-50cfe3ed-2397-4e4c-b4bc-5a5c948ea118 service nova] Lock "031156ba-251e-4b8b-86bd-9c967adc808f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2410.251716] env[61964]: DEBUG oslo_concurrency.lockutils [req-9ba800c0-ec14-4bdd-90ed-6df074e20f58 req-50cfe3ed-2397-4e4c-b4bc-5a5c948ea118 service nova] Lock "031156ba-251e-4b8b-86bd-9c967adc808f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2410.251894] env[61964]: DEBUG nova.compute.manager [req-9ba800c0-ec14-4bdd-90ed-6df074e20f58 req-50cfe3ed-2397-4e4c-b4bc-5a5c948ea118 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] No waiting events found dispatching network-vif-plugged-f983c619-418f-463d-8330-078d12c258a4 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2410.252067] env[61964]: WARNING nova.compute.manager [req-9ba800c0-ec14-4bdd-90ed-6df074e20f58 req-50cfe3ed-2397-4e4c-b4bc-5a5c948ea118 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Received unexpected event network-vif-plugged-f983c619-418f-463d-8330-078d12c258a4 for instance with vm_state building and task_state spawning. [ 2410.261452] env[61964]: DEBUG nova.policy [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2a54aa10d9490c9824f342bda68aaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ce335cfa25f4c0aa494bd2c87dda282', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2410.264369] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2410.295615] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2410.295865] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2410.296032] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2410.296239] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2410.296356] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2410.296501] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2410.296706] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2410.296868] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2410.297223] env[61964]: DEBUG nova.virt.hardware [None 
req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2410.297458] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2410.297656] env[61964]: DEBUG nova.virt.hardware [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2410.298766] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a65c46-9f7b-4196-8237-57488f33b835 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.309650] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff12f22-13fd-4699-9a66-7cf5719e5dda {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.428450] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Successfully updated port: f983c619-418f-463d-8330-078d12c258a4 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2410.443712] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "refresh_cache-031156ba-251e-4b8b-86bd-9c967adc808f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2410.443869] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "refresh_cache-031156ba-251e-4b8b-86bd-9c967adc808f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2410.444020] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2410.512394] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2410.517969] env[61964]: DEBUG nova.network.neutron [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Updating instance_info_cache with network_info: [{"id": "67fc8868-0816-4461-8ed9-1c34200f5e16", "address": "fa:16:3e:20:90:bd", "network": {"id": "c1c79d63-5676-4e03-a591-cb046d461540", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-617863065", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67fc8868-08", "ovs_interfaceid": "67fc8868-0816-4461-8ed9-1c34200f5e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55c7ff44-dbe4-4808-9547-501e885f456e", "address": "fa:16:3e:82:45:e1", "network": {"id": "427d54ba-0e99-4eb2-b8b7-a25bbdc86070", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-679417950", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "40c88d0201ba4420a2e8ae0d237c29f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c7ff44-db", "ovs_interfaceid": "55c7ff44-dbe4-4808-9547-501e885f456e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2410.535852] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Releasing lock "refresh_cache-63911858-5a79-4479-8c92-46afca980300" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2410.536279] env[61964]: DEBUG nova.compute.manager [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2410.536466] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2410.536991] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-187c9b47-e07a-477e-bf63-99a4df749a1a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.541540] env[61964]: DEBUG nova.compute.manager [req-105bffce-0200-4b0b-bd25-193a0b322a36 req-f78eea25-c55b-4446-8aca-7f07595836f4 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Received event network-vif-plugged-5b29ac57-1a6f-4482-80e7-78c554da8a25 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2410.541789] env[61964]: DEBUG oslo_concurrency.lockutils [req-105bffce-0200-4b0b-bd25-193a0b322a36 req-f78eea25-c55b-4446-8aca-7f07595836f4 service nova] Acquiring lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2410.541998] env[61964]: DEBUG oslo_concurrency.lockutils [req-105bffce-0200-4b0b-bd25-193a0b322a36 req-f78eea25-c55b-4446-8aca-7f07595836f4 service nova] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2410.542300] env[61964]: DEBUG oslo_concurrency.lockutils [req-105bffce-0200-4b0b-bd25-193a0b322a36 req-f78eea25-c55b-4446-8aca-7f07595836f4 service nova] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2410.542405] env[61964]: DEBUG nova.compute.manager [req-105bffce-0200-4b0b-bd25-193a0b322a36 req-f78eea25-c55b-4446-8aca-7f07595836f4 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] No waiting events found dispatching network-vif-plugged-5b29ac57-1a6f-4482-80e7-78c554da8a25 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2410.542568] env[61964]: WARNING nova.compute.manager [req-105bffce-0200-4b0b-bd25-193a0b322a36 req-f78eea25-c55b-4446-8aca-7f07595836f4 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Received unexpected event network-vif-plugged-5b29ac57-1a6f-4482-80e7-78c554da8a25 for instance with vm_state building and task_state spawning. 
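The recurring "Acquiring lock ... by ...", "Lock ... acquired ... :: waited", and "Lock ... 'released' ... :: held" DEBUG lines around the instance-event handling above come from oslo.concurrency's synchronized wrapper (the "inner" frames in lockutils.py). A minimal sketch of that primitive, using a hypothetical lock name modelled on the "<uuid>-events" locks seen in this log; with DEBUG logging enabled it emits the same acquire/wait/release trace:

    # Sketch only: hypothetical lock name; reproduces the lockutils DEBUG lines above.
    import logging
    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)  # lockutils logs acquire/release at DEBUG

    @lockutils.synchronized("1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events")
    def _pop_event():
        # Critical section; Nova wraps its InstanceEvents bookkeeping the same way.
        return None

    _pop_event()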
[ 2410.546854] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d3623d-1833-4229-ae3c-549468739cb8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.577063] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63911858-5a79-4479-8c92-46afca980300 could not be found. [ 2410.577277] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2410.577454] env[61964]: INFO nova.compute.manager [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2410.577716] env[61964]: DEBUG oslo.service.loopingcall [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2410.577969] env[61964]: DEBUG nova.compute.manager [-] [instance: 63911858-5a79-4479-8c92-46afca980300] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2410.578076] env[61964]: DEBUG nova.network.neutron [-] [instance: 63911858-5a79-4479-8c92-46afca980300] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2410.711725] env[61964]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61964) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2410.712019] env[61964]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2410.712549] env[61964]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-69a4320b-ebe6-40fd-a937-21b52b8a129c'] [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2410.712549] env[61964]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2410.712549] env[61964]: ERROR oslo.service.loopingcall [ 2410.714165] env[61964]: ERROR nova.compute.manager [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
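The NeutronAdminCredentialConfigurationInvalid failure above is raised in the nova.network.neutron wrapper once the service-scoped Neutron client receives a 401 while listing the instance's ports during network deallocation, and the preceding ERROR line explicitly points at the Neutron admin credentials configured in nova.conf. Below is a small diagnostic sketch that exercises the same neutronclient call path (Keystone session auth, then list_ports) outside of Nova; the auth_url, username, password, and project values are placeholders and would need to be replaced with the deployment's actual [neutron] service credentials:

    # Sketch only: placeholder credentials/endpoints; a 401 here mirrors the
    # Unauthorized -> NeutronAdminCredentialConfigurationInvalid chain in the log.
    from keystoneauth1.identity import v3
    from keystoneauth1 import session
    from neutronclient.v2_0 import client as neutron_client

    auth = v3.Password(
        auth_url="http://controller:5000/v3",    # placeholder Keystone endpoint
        username="neutron-service-user",          # placeholder service user
        password="service-password",              # placeholder password
        project_name="service",
        user_domain_name="Default",
        project_domain_name="Default",
    )
    sess = session.Session(auth=auth)
    neutron = neutron_client.Client(session=sess)

    # Same API call the deallocation path uses; a port list response means the
    # credentials authenticate, while a 401 reproduces the failure logged above.
    print(neutron.list_ports(device_id="63911858-5a79-4479-8c92-46afca980300"))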
[ 2410.743187] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Successfully updated port: 5b29ac57-1a6f-4482-80e7-78c554da8a25 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2410.749353] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Updating instance_info_cache with network_info: [{"id": "f983c619-418f-463d-8330-078d12c258a4", "address": "fa:16:3e:18:6f:10", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf983c619-41", "ovs_interfaceid": "f983c619-418f-463d-8330-078d12c258a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2410.757563] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "refresh_cache-1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2410.757563] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired lock "refresh_cache-1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2410.757696] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2410.760826] env[61964]: ERROR nova.compute.manager [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] exception_handler_v20(status_code, error_body) [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise client_exc(message=error_message, [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Neutron server returns request_ids: ['req-69a4320b-ebe6-40fd-a937-21b52b8a129c'] [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] During handling of the above exception, another exception occurred: [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] Traceback (most recent call last): [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._delete_instance(context, instance, bdms) [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._shutdown_instance(context, instance, bdms) [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._try_deallocate_network(context, instance, requested_networks) [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] with excutils.save_and_reraise_exception(): [ 2410.760826] env[61964]: ERROR 
nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self.force_reraise() [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise self.value [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] _deallocate_network_with_retries() [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return evt.wait() [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = hub.switch() [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.greenlet.switch() [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = func(*self.args, **self.kw) [ 2410.760826] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] result = f(*args, **kwargs) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._deallocate_network( [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self.network_api.deallocate_for_instance( [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 
63911858-5a79-4479-8c92-46afca980300] data = neutron.list_ports(**search_opts) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.list('ports', self.ports_path, retrieve_all, [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] for r in self._pagination(collection, path, **params): [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] res = self.get(path, params=params) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.retry_request("GET", action, body=body, [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] return self.do_request(method, action, body=body, [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] ret = obj(*args, **kwargs) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] self._handle_fault_response(status_code, replybody, resp) [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2410.762408] env[61964]: ERROR nova.compute.manager [instance: 63911858-5a79-4479-8c92-46afca980300] [ 2410.776133] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "refresh_cache-031156ba-251e-4b8b-86bd-9c967adc808f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2410.776442] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Instance network_info: |[{"id": "f983c619-418f-463d-8330-078d12c258a4", "address": "fa:16:3e:18:6f:10", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf983c619-41", "ovs_interfaceid": "f983c619-418f-463d-8330-078d12c258a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2410.776825] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:6f:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19440099-773e-4a31-b82e-84a4daa5d8fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f983c619-418f-463d-8330-078d12c258a4', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2410.784619] env[61964]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating folder: Project (91a3f71b0c37437a98325ca4a2f48d46). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2410.785211] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-632c8bb0-682b-444b-b73a-7efaa826cec6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.793698] env[61964]: DEBUG oslo_concurrency.lockutils [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Lock "63911858-5a79-4479-8c92-46afca980300" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.092s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2410.796510] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created folder: Project (91a3f71b0c37437a98325ca4a2f48d46) in parent group-v351942. [ 2410.796701] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating folder: Instances. Parent ref: group-v352031. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2410.797130] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfe63552-66b3-4421-8a52-5cda25a0e23e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.806386] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created folder: Instances in parent group-v352031. [ 2410.806620] env[61964]: DEBUG oslo.service.loopingcall [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2410.807185] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2410.807403] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d71631fc-154d-41fd-bb38-e4db1369cceb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.832310] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2410.832310] env[61964]: value = "task-1688735" [ 2410.832310] env[61964]: _type = "Task" [ 2410.832310] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.843839] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688735, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.868975] env[61964]: INFO nova.compute.manager [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] [instance: 63911858-5a79-4479-8c92-46afca980300] Successfully reverted task state from None on failure for instance. [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server [None req-464009e8-ccb4-4fe7-90e6-8e456e7554f5 tempest-ServersTestMultiNic-274388626 tempest-ServersTestMultiNic-274388626-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-69a4320b-ebe6-40fd-a937-21b52b8a129c'] [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in 
__exit__ [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 2410.873744] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server raise self.value [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 
2410.875619] env[61964]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2410.875619] env[61964]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2410.877144] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2410.877144] env[61964]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2410.877144] env[61964]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2410.877144] env[61964]: ERROR oslo_messaging.rpc.server raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 2410.877144] env[61964]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2410.877144] env[61964]: ERROR oslo_messaging.rpc.server [ 2410.891954] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Successfully created port: 26aac08a-dcb2-4228-8088-d6087d374098 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2411.036268] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2411.255549] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Updating instance_info_cache with network_info: [{"id": "5b29ac57-1a6f-4482-80e7-78c554da8a25", "address": "fa:16:3e:f4:4f:a6", "network": {"id": "02afc8b9-c726-42b8-81c4-b48ca1fdbd7f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-161696213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40991aebb97545db8c04deda187dcdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b29ac57-1a", "ovs_interfaceid": "5b29ac57-1a6f-4482-80e7-78c554da8a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2411.267012] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Releasing lock "refresh_cache-1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2411.267323] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Instance network_info: |[{"id": "5b29ac57-1a6f-4482-80e7-78c554da8a25", "address": "fa:16:3e:f4:4f:a6", "network": {"id": "02afc8b9-c726-42b8-81c4-b48ca1fdbd7f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-161696213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40991aebb97545db8c04deda187dcdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b29ac57-1a", "ovs_interfaceid": "5b29ac57-1a6f-4482-80e7-78c554da8a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2411.267698] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:4f:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b29ac57-1a6f-4482-80e7-78c554da8a25', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2411.275346] env[61964]: DEBUG oslo.service.loopingcall [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2411.276231] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2411.276463] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-348557cd-f719-455c-adef-06386fc3975b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.296928] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2411.296928] env[61964]: value = "task-1688736" [ 2411.296928] env[61964]: _type = "Task" [ 2411.296928] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.307071] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688736, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.341642] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688735, 'name': CreateVM_Task, 'duration_secs': 0.317744} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2411.341642] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2411.342361] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2411.342524] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2411.342914] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2411.343189] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6086596b-7ac1-496c-9337-1748c26abebd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.347720] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 2411.347720] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f517cf-182c-6d80-40d1-4fa9945d1d7f" [ 2411.347720] env[61964]: _type = "Task" [ 2411.347720] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.355642] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f517cf-182c-6d80-40d1-4fa9945d1d7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.807052] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688736, 'name': CreateVM_Task, 'duration_secs': 0.37138} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2411.807052] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2411.808119] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2411.817367] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Successfully updated port: 26aac08a-dcb2-4228-8088-d6087d374098 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2411.827875] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "refresh_cache-7d1977c2-cc88-4964-989a-9258f345c4f2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2411.828032] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired lock "refresh_cache-7d1977c2-cc88-4964-989a-9258f345c4f2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2411.828182] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2411.857980] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2411.858257] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2411.858469] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2411.858677] env[61964]: DEBUG oslo_concurrency.lockutils [None 
req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2411.858972] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2411.859238] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcc19797-af9a-4780-9c3d-3a9a0800781d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.864042] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 2411.864042] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52062aed-2e0b-b5bb-5737-ff6e5e9ecc70" [ 2411.864042] env[61964]: _type = "Task" [ 2411.864042] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.871783] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52062aed-2e0b-b5bb-5737-ff6e5e9ecc70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2411.872513] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2412.085053] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Updating instance_info_cache with network_info: [{"id": "26aac08a-dcb2-4228-8088-d6087d374098", "address": "fa:16:3e:7a:c4:70", "network": {"id": "fc1c1a44-a704-4482-a74c-2f3f03a5ca6c", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1683801677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ce335cfa25f4c0aa494bd2c87dda282", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26aac08a-dc", "ovs_interfaceid": "26aac08a-dcb2-4228-8088-d6087d374098", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2412.102306] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Releasing lock "refresh_cache-7d1977c2-cc88-4964-989a-9258f345c4f2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2412.102605] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Instance network_info: |[{"id": "26aac08a-dcb2-4228-8088-d6087d374098", "address": "fa:16:3e:7a:c4:70", "network": {"id": "fc1c1a44-a704-4482-a74c-2f3f03a5ca6c", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1683801677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ce335cfa25f4c0aa494bd2c87dda282", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26aac08a-dc", "ovs_interfaceid": "26aac08a-dcb2-4228-8088-d6087d374098", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2412.103012] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:c4:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26aac08a-dcb2-4228-8088-d6087d374098', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2412.111149] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating folder: Project (9ce335cfa25f4c0aa494bd2c87dda282). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2412.111407] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5853af9-3a33-46a3-a336-75114ab382a7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.122356] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Created folder: Project (9ce335cfa25f4c0aa494bd2c87dda282) in parent group-v351942. [ 2412.122542] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating folder: Instances. Parent ref: group-v352035. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2412.122771] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b22e54f-c6e1-4d0d-aff6-2276a7cccaf3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.132354] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Created folder: Instances in parent group-v352035. [ 2412.132578] env[61964]: DEBUG oslo.service.loopingcall [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2412.132756] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2412.132986] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bf0dab8-57b1-42bf-acbf-3ca485d6bd6f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.154877] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2412.154877] env[61964]: value = "task-1688739" [ 2412.154877] env[61964]: _type = "Task" [ 2412.154877] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2412.162851] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688739, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.346923] env[61964]: DEBUG nova.compute.manager [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Received event network-changed-f983c619-418f-463d-8330-078d12c258a4 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2412.347200] env[61964]: DEBUG nova.compute.manager [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Refreshing instance network info cache due to event network-changed-f983c619-418f-463d-8330-078d12c258a4. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2412.347444] env[61964]: DEBUG oslo_concurrency.lockutils [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] Acquiring lock "refresh_cache-031156ba-251e-4b8b-86bd-9c967adc808f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2412.347594] env[61964]: DEBUG oslo_concurrency.lockutils [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] Acquired lock "refresh_cache-031156ba-251e-4b8b-86bd-9c967adc808f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2412.347648] env[61964]: DEBUG nova.network.neutron [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Refreshing network info cache for port f983c619-418f-463d-8330-078d12c258a4 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2412.375118] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2412.375393] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2412.375602] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2412.577617] env[61964]: DEBUG nova.compute.manager [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Received event 
network-changed-5b29ac57-1a6f-4482-80e7-78c554da8a25 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2412.577617] env[61964]: DEBUG nova.compute.manager [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Refreshing instance network info cache due to event network-changed-5b29ac57-1a6f-4482-80e7-78c554da8a25. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2412.578384] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Acquiring lock "refresh_cache-1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2412.578709] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Acquired lock "refresh_cache-1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2412.579021] env[61964]: DEBUG nova.network.neutron [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Refreshing network info cache for port 5b29ac57-1a6f-4482-80e7-78c554da8a25 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2412.599845] env[61964]: DEBUG nova.network.neutron [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Updated VIF entry in instance network info cache for port f983c619-418f-463d-8330-078d12c258a4. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2412.600171] env[61964]: DEBUG nova.network.neutron [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Updating instance_info_cache with network_info: [{"id": "f983c619-418f-463d-8330-078d12c258a4", "address": "fa:16:3e:18:6f:10", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf983c619-41", "ovs_interfaceid": "f983c619-418f-463d-8330-078d12c258a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2412.609504] env[61964]: DEBUG oslo_concurrency.lockutils [req-c543d53c-123b-43a3-8ad3-32538bd8ae74 req-5555f5d4-c9c6-47ad-b088-e558ebcecb59 service nova] Releasing lock "refresh_cache-031156ba-251e-4b8b-86bd-9c967adc808f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2412.664571] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688739, 'name': CreateVM_Task} progress is 99%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.848354] env[61964]: DEBUG nova.network.neutron [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Updated VIF entry in instance network info cache for port 5b29ac57-1a6f-4482-80e7-78c554da8a25. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2412.848733] env[61964]: DEBUG nova.network.neutron [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Updating instance_info_cache with network_info: [{"id": "5b29ac57-1a6f-4482-80e7-78c554da8a25", "address": "fa:16:3e:f4:4f:a6", "network": {"id": "02afc8b9-c726-42b8-81c4-b48ca1fdbd7f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-161696213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40991aebb97545db8c04deda187dcdfd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b29ac57-1a", "ovs_interfaceid": "5b29ac57-1a6f-4482-80e7-78c554da8a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2412.858576] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Releasing lock "refresh_cache-1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2412.858858] env[61964]: DEBUG nova.compute.manager [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Received event network-vif-plugged-26aac08a-dcb2-4228-8088-d6087d374098 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2412.859070] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Acquiring lock "7d1977c2-cc88-4964-989a-9258f345c4f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2412.859271] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2412.859433] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2412.859593] env[61964]: DEBUG 
nova.compute.manager [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] No waiting events found dispatching network-vif-plugged-26aac08a-dcb2-4228-8088-d6087d374098 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2412.859753] env[61964]: WARNING nova.compute.manager [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Received unexpected event network-vif-plugged-26aac08a-dcb2-4228-8088-d6087d374098 for instance with vm_state building and task_state spawning. [ 2412.859911] env[61964]: DEBUG nova.compute.manager [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Received event network-changed-26aac08a-dcb2-4228-8088-d6087d374098 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2412.860125] env[61964]: DEBUG nova.compute.manager [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Refreshing instance network info cache due to event network-changed-26aac08a-dcb2-4228-8088-d6087d374098. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2412.860395] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Acquiring lock "refresh_cache-7d1977c2-cc88-4964-989a-9258f345c4f2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2412.860572] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Acquired lock "refresh_cache-7d1977c2-cc88-4964-989a-9258f345c4f2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2412.860738] env[61964]: DEBUG nova.network.neutron [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Refreshing network info cache for port 26aac08a-dcb2-4228-8088-d6087d374098 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2413.105485] env[61964]: DEBUG nova.network.neutron [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Updated VIF entry in instance network info cache for port 26aac08a-dcb2-4228-8088-d6087d374098. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2413.105850] env[61964]: DEBUG nova.network.neutron [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Updating instance_info_cache with network_info: [{"id": "26aac08a-dcb2-4228-8088-d6087d374098", "address": "fa:16:3e:7a:c4:70", "network": {"id": "fc1c1a44-a704-4482-a74c-2f3f03a5ca6c", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1683801677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ce335cfa25f4c0aa494bd2c87dda282", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26aac08a-dc", "ovs_interfaceid": "26aac08a-dcb2-4228-8088-d6087d374098", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2413.115297] env[61964]: DEBUG oslo_concurrency.lockutils [req-6256b1bb-3a43-42ea-8f4b-3d4bd5bcd305 req-65767d65-eadb-43e2-9550-9f91782bb4a7 service nova] Releasing lock "refresh_cache-7d1977c2-cc88-4964-989a-9258f345c4f2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2413.165963] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688739, 'name': CreateVM_Task} progress is 99%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2413.666705] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688739, 'name': CreateVM_Task, 'duration_secs': 1.330694} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2413.667057] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2413.667579] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2413.667743] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2413.668066] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2413.668316] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85e94f62-f72e-4f44-983a-0354ce8d4425 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.672782] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 2413.672782] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52da0a06-30bf-8090-c9d1-b6515a93b889" [ 2413.672782] env[61964]: _type = "Task" [ 2413.672782] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2413.680349] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52da0a06-30bf-8090-c9d1-b6515a93b889, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.183765] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2414.184033] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2414.184252] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2414.384245] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2414.384457] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2414.384521] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2414.405679] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.405805] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.405902] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406085] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406217] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406340] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406461] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406580] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406701] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406819] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2414.406936] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2415.384712] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.384297] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2420.584780] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2421.384028] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.384028] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2423.385347] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2425.379139] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2425.383861] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2425.384078] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.384566] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.398805] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2429.399048] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2429.399217] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2429.399373] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2429.400495] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eec4a69-06bc-4525-b409-b694e63c3525 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.409080] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a38d9f-5e8a-44f3-92fc-c4b77bf260fd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.424264] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8099660d-cbcd-444d-9d30-c08f2a1a413a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.430960] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8175d5c0-8549-4962-b7eb-75e44a76d67d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.459997] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181370MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2429.460360] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2429.460360] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2429.528283] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.528445] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.528568] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.528689] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.528808] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.528928] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f5589fbe-df43-4407-b63a-5e4f96021b61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.529116] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.529240] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.529355] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.529493] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2429.540486] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2429.540705] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2429.540848] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2429.662688] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa8a9e1-3305-4b19-beca-f494528cc61a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.670055] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947bc21a-dbec-428a-965c-383c84be54ba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.700134] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c87ac5-3e6a-4ee6-a0ee-81ffb6ef41e1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.707076] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b336f1d-b207-468e-a83e-620bfe4c862e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.719695] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2429.728370] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2429.743244] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2429.743424] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.283s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2449.032053] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "031156ba-251e-4b8b-86bd-9c967adc808f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2454.900985] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2457.277013] env[61964]: WARNING oslo_vmware.rw_handles [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2457.277013] env[61964]: ERROR oslo_vmware.rw_handles [ 2457.277611] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Downloaded image file 
data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2457.279569] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2457.280542] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Copying Virtual Disk [datastore1] vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/b6116ea9-718a-4e07-af00-4b740c3a6c08/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2457.280542] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bcc6de3-8400-4379-9458-46210d438424 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.291918] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Waiting for the task: (returnval){ [ 2457.291918] env[61964]: value = "task-1688740" [ 2457.291918] env[61964]: _type = "Task" [ 2457.291918] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2457.300918] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Task: {'id': task-1688740, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2457.808991] env[61964]: DEBUG oslo_vmware.exceptions [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2457.809340] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2457.809961] env[61964]: ERROR nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2457.809961] env[61964]: Faults: ['InvalidArgument'] [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Traceback (most recent call last): [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] yield resources [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self.driver.spawn(context, instance, image_meta, [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self._fetch_image_if_missing(context, vi) [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] image_cache(vi, tmp_image_ds_loc) [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] vm_util.copy_virtual_disk( [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] session._wait_for_task(vmdk_copy_task) [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] return self.wait_for_task(task_ref) [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] return evt.wait() [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] result = hub.switch() [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] return self.greenlet.switch() [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self.f(*self.args, **self.kw) [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] raise exceptions.translate_fault(task_info.error) [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Faults: ['InvalidArgument'] [ 2457.809961] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] [ 2457.811275] env[61964]: INFO nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Terminating instance [ 2457.811901] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2457.812125] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2457.812369] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2748d5a-000c-4f89-abe8-2bd5aa6fb05e 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.815884] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2457.816147] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2457.816850] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62df3089-646c-4b4b-a97b-23c13af0a6d8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.824290] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2457.824599] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ae1b04a-60e5-419f-b699-a197d7684a50 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.827054] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2457.827343] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2457.828233] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-083d3af0-eca8-4661-b97f-7c705f4c7b5f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.833235] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2457.833235] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525b18d6-47ec-dcb7-2b40-4a79ce4a024e" [ 2457.833235] env[61964]: _type = "Task" [ 2457.833235] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2457.842363] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525b18d6-47ec-dcb7-2b40-4a79ce4a024e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2457.921293] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2457.921717] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2457.921844] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Deleting the datastore file [datastore1] c91a6c73-d161-488e-a27d-08c1ab3e3e80 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2457.922211] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-adf34b97-b9b1-44b8-8c76-9f2836669a0c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.930599] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Waiting for the task: (returnval){ [ 2457.930599] env[61964]: value = "task-1688742" [ 2457.930599] env[61964]: _type = "Task" [ 2457.930599] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2457.939814] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Task: {'id': task-1688742, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2458.345086] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2458.345544] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating directory with path [datastore1] vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2458.345640] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0bf8222-97f5-4324-a1c8-7d6018064ba2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.358108] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Created directory with path [datastore1] vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2458.358373] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Fetch image to [datastore1] vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2458.359538] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2458.359538] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9ffe33-f8fd-4a70-bb9e-8e65506db377 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.366441] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5522cde8-1f6c-43b4-b276-89bd3ecc6778 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.375721] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606cf923-2fa1-4af7-846a-2459ea4f8afd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.406299] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77329fd0-48f0-4a9b-a4c6-972c732cb573 {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.412946] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-70a52816-4be8-45c4-b103-db8e455be7bc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.437119] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2458.448167] env[61964]: DEBUG oslo_vmware.api [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Task: {'id': task-1688742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090078} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2458.448167] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2458.448167] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2458.448167] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2458.448167] env[61964]: INFO nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Took 0.63 seconds to destroy the instance on the hypervisor. 
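Annotation: the CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same shape: the driver invokes a vSphere call that returns a task reference, and oslo.vmware then polls the task until it reports success or an error (the "Waiting for the task ... to complete" and "progress is N%" lines). The sketch below is a minimal, self-contained illustration of that poll loop; TaskInfo, fetch_task_info and poll_interval are illustrative assumptions, not the oslo.vmware API, which drives the loop with an eventlet-based looping call as the _poll_task locators show.

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    # Illustrative stand-in for the vSphere TaskInfo object being polled above.
    state: str                 # "queued" | "running" | "success" | "error"
    progress: int = 0          # percent, as in "progress is 0%." / "99%."
    error: Optional[str] = None

def wait_for_task_sketch(fetch_task_info: Callable[[], TaskInfo],
                         poll_interval: float = 0.5,
                         timeout: float = 300.0) -> TaskInfo:
    # Poll until the task finishes; on error, raise (oslo.vmware instead
    # translates the fault, e.g. into VimFaultException: InvalidArgument).
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        print("progress is %d%%." % info.progress)  # analogue of the DEBUG lines
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)

# Example: a task that is already complete.
wait_for_task_sketch(lambda: TaskInfo(state="success", progress=100))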
[ 2458.453388] env[61964]: DEBUG nova.compute.claims [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2458.453388] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2458.453388] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2458.525183] env[61964]: DEBUG oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2458.594956] env[61964]: DEBUG oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2458.594956] env[61964]: DEBUG oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2458.753451] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e76562e-4f30-4d34-9b4d-70a0eea84c3e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.761178] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88c619b-6afa-4eef-8cf2-222f10990c57 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.790477] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a003d8-6db0-40d4-88b2-6a26158e4d26 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.798069] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c288fbf6-38bb-4d22-bf43-fcbc9f582cbf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.811054] env[61964]: DEBUG nova.compute.provider_tree [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2458.819253] env[61964]: DEBUG nova.scheduler.client.report [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2458.834808] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.382s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2458.835463] env[61964]: ERROR nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2458.835463] env[61964]: Faults: ['InvalidArgument'] [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Traceback (most recent call last): [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/compute/manager.py", line 2615, 
in _build_and_run_instance [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self.driver.spawn(context, instance, image_meta, [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self._fetch_image_if_missing(context, vi) [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] image_cache(vi, tmp_image_ds_loc) [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] vm_util.copy_virtual_disk( [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] session._wait_for_task(vmdk_copy_task) [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] return self.wait_for_task(task_ref) [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] return evt.wait() [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] result = hub.switch() [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] return self.greenlet.switch() [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] self.f(*self.args, **self.kw) [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: 
c91a6c73-d161-488e-a27d-08c1ab3e3e80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] raise exceptions.translate_fault(task_info.error) [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Faults: ['InvalidArgument'] [ 2458.835463] env[61964]: ERROR nova.compute.manager [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] [ 2458.837018] env[61964]: DEBUG nova.compute.utils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2458.837750] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Build of instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 was re-scheduled: A specified parameter was not correct: fileType [ 2458.837750] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2458.838471] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2458.839011] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2458.839011] env[61964]: DEBUG nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2458.839011] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2459.294546] env[61964]: DEBUG nova.network.neutron [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2459.310856] env[61964]: INFO nova.compute.manager [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Took 0.47 seconds to deallocate network for instance. [ 2459.434890] env[61964]: INFO nova.scheduler.client.report [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Deleted allocations for instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 [ 2459.460294] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b1d664a8-01b3-4fcf-907c-28139a036cf3 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 481.999s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2459.461572] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 285.271s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2459.461816] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Acquiring lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2459.462170] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock 
"c91a6c73-d161-488e-a27d-08c1ab3e3e80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2459.462399] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2459.465115] env[61964]: INFO nova.compute.manager [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Terminating instance [ 2459.467425] env[61964]: DEBUG nova.compute.manager [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2459.467675] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2459.468243] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfeec437-17bc-467e-a413-83a4416b4e18 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.476107] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2459.486630] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5f92a6-a359-4107-a115-182300c228c2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.517388] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c91a6c73-d161-488e-a27d-08c1ab3e3e80 could not be found. 
[ 2459.517864] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2459.517864] env[61964]: INFO nova.compute.manager [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2459.518144] env[61964]: DEBUG oslo.service.loopingcall [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2459.518413] env[61964]: DEBUG nova.compute.manager [-] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2459.518515] env[61964]: DEBUG nova.network.neutron [-] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2459.557715] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2459.560088] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2459.560088] env[61964]: INFO nova.compute.claims [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2459.562883] env[61964]: DEBUG nova.network.neutron [-] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2459.571379] env[61964]: INFO nova.compute.manager [-] [instance: c91a6c73-d161-488e-a27d-08c1ab3e3e80] Took 0.05 seconds to deallocate network for instance. 
[ 2459.698530] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8a885f2b-d404-4444-a4dd-96253c5fa631 tempest-ServerDiagnosticsNegativeTest-1024629625 tempest-ServerDiagnosticsNegativeTest-1024629625-project-member] Lock "c91a6c73-d161-488e-a27d-08c1ab3e3e80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.237s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2459.796908] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74edc04a-d755-4699-b9a3-927e8e762945 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.804598] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdc6ce1-96b5-4b54-af0b-026fbfae534a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.836250] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9eba3b-5d17-4f0b-82eb-2f60d5c3cc95 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.849023] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a682b90-b988-4aa7-b986-faa8a6751a81 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.859425] env[61964]: DEBUG nova.compute.provider_tree [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2459.868727] env[61964]: DEBUG nova.scheduler.client.report [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2459.890561] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.332s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2459.891073] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2459.929979] env[61964]: DEBUG nova.compute.utils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2459.933407] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2459.933574] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2459.944946] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2460.026461] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2460.041525] env[61964]: DEBUG nova.policy [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57bf9b92e90a4661af70153b7cdaad79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '020051b9fbe84c44829a5719217c4a2c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2460.051156] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2460.051404] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2460.051590] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2460.051734] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2460.051880] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2460.052120] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2460.052410] 
env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2460.052611] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2460.052828] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2460.053221] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2460.053221] env[61964]: DEBUG nova.virt.hardware [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2460.054133] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75ae873-665e-4c8a-9465-c2099de31760 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.064076] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e48be31-5617-4008-ba14-035ef853462c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.481590] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Successfully created port: c8c6ad07-9c12-45a5-aef6-1da46cb4ab19 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2460.590223] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "04451950-2e85-46dd-a516-6b7743e03f7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2460.590368] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "04451950-2e85-46dd-a516-6b7743e03f7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2461.344282] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Successfully updated port: c8c6ad07-9c12-45a5-aef6-1da46cb4ab19 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2461.369067] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "refresh_cache-f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2461.369067] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquired lock "refresh_cache-f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2461.369067] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2461.426886] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2461.430884] env[61964]: DEBUG nova.compute.manager [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Received event network-vif-plugged-c8c6ad07-9c12-45a5-aef6-1da46cb4ab19 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2461.431105] env[61964]: DEBUG oslo_concurrency.lockutils [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] Acquiring lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2461.431316] env[61964]: DEBUG oslo_concurrency.lockutils [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2461.431496] env[61964]: DEBUG oslo_concurrency.lockutils [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2461.431701] env[61964]: DEBUG nova.compute.manager [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] No waiting events found dispatching network-vif-plugged-c8c6ad07-9c12-45a5-aef6-1da46cb4ab19 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2461.431877] env[61964]: WARNING nova.compute.manager [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Received unexpected event network-vif-plugged-c8c6ad07-9c12-45a5-aef6-1da46cb4ab19 for instance with vm_state building and task_state spawning. [ 2461.432065] env[61964]: DEBUG nova.compute.manager [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Received event network-changed-c8c6ad07-9c12-45a5-aef6-1da46cb4ab19 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2461.432259] env[61964]: DEBUG nova.compute.manager [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Refreshing instance network info cache due to event network-changed-c8c6ad07-9c12-45a5-aef6-1da46cb4ab19. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2461.432635] env[61964]: DEBUG oslo_concurrency.lockutils [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] Acquiring lock "refresh_cache-f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2461.505188] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "7d1977c2-cc88-4964-989a-9258f345c4f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2461.688030] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Updating instance_info_cache with network_info: [{"id": "c8c6ad07-9c12-45a5-aef6-1da46cb4ab19", "address": "fa:16:3e:32:07:1a", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ad07-9c", "ovs_interfaceid": "c8c6ad07-9c12-45a5-aef6-1da46cb4ab19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2461.700605] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Releasing lock "refresh_cache-f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2461.700911] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Instance network_info: |[{"id": "c8c6ad07-9c12-45a5-aef6-1da46cb4ab19", "address": "fa:16:3e:32:07:1a", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ad07-9c", "ovs_interfaceid": "c8c6ad07-9c12-45a5-aef6-1da46cb4ab19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2461.701235] env[61964]: DEBUG oslo_concurrency.lockutils [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] Acquired lock "refresh_cache-f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2461.701415] env[61964]: DEBUG nova.network.neutron [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Refreshing network info cache for port c8c6ad07-9c12-45a5-aef6-1da46cb4ab19 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2461.702651] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:07:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8c6ad07-9c12-45a5-aef6-1da46cb4ab19', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2461.710033] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Creating folder: Project (020051b9fbe84c44829a5719217c4a2c). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2461.710833] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95bced2e-48a7-4e4c-819d-d50b1a482d03 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.724829] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Created folder: Project (020051b9fbe84c44829a5719217c4a2c) in parent group-v351942. [ 2461.725015] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Creating folder: Instances. Parent ref: group-v352038. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2461.725283] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6847cea-9867-411f-89b3-88c86c8ee05e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.734172] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Created folder: Instances in parent group-v352038. [ 2461.734386] env[61964]: DEBUG oslo.service.loopingcall [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2461.734570] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2461.734829] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c192fcb2-d461-4cc1-a369-8c5adbf0c682 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.755506] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2461.755506] env[61964]: value = "task-1688745" [ 2461.755506] env[61964]: _type = "Task" [ 2461.755506] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.762873] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688745, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.953989] env[61964]: DEBUG nova.network.neutron [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Updated VIF entry in instance network info cache for port c8c6ad07-9c12-45a5-aef6-1da46cb4ab19. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2461.954562] env[61964]: DEBUG nova.network.neutron [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Updating instance_info_cache with network_info: [{"id": "c8c6ad07-9c12-45a5-aef6-1da46cb4ab19", "address": "fa:16:3e:32:07:1a", "network": {"id": "c25245b9-44db-401b-b5f8-5311ab2dd633", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc053e04e4de43ce9a033f1ecfad3809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c6ad07-9c", "ovs_interfaceid": "c8c6ad07-9c12-45a5-aef6-1da46cb4ab19", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2461.964580] env[61964]: DEBUG oslo_concurrency.lockutils [req-f4ba0c24-3b28-4c64-b96c-eac3096f81e9 req-71a2de6f-6bfe-4ee8-b431-db3f0563c52f service nova] Releasing lock "refresh_cache-f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2462.265219] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688745, 'name': CreateVM_Task, 'duration_secs': 0.306796} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.265501] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2462.266178] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2462.266342] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2462.266650] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2462.266916] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba3bbd72-e655-4412-826f-e7a195f63b4c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.271654] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Waiting for the task: (returnval){ [ 2462.271654] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528a8f8d-3958-d437-e83c-0ba1f2650b3b" [ 2462.271654] env[61964]: _type = "Task" [ 2462.271654] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.279259] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528a8f8d-3958-d437-e83c-0ba1f2650b3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.782543] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2462.782828] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2462.783033] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2475.745670] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2475.745964] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2475.745964] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2475.771567] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.771567] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.771567] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.771814] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.771814] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.771895] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.771945] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.772081] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.772198] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.772339] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2475.772472] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2475.773063] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2477.384682] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2481.384408] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2481.384712] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2484.384629] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2485.384351] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2485.384645] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2487.379550] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2489.384588] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2489.399626] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2489.399874] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2489.400097] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2489.400285] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2489.406024] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5eb71c-456b-4856-bf7b-20f420f9eca9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.413015] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f174932e-5766-40f0-9496-3b2711998720 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.428460] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114d180b-b229-4b4c-b059-957fdbf3d29b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.435281] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679c49e3-fa6f-4cb2-8118-041d4d0b7ae6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.470781] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2489.471329] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2489.471765] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 9ae01818-da08-4137-97c0-bc4c57759d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f5589fbe-df43-4407-b63a-5e4f96021b61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.558020] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2489.575118] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2489.575118] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2489.575118] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2489.744076] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f502067-b171-42fe-b250-244bba6fed12 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.750840] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c918e5-958f-4188-b0a8-afedcd72b85a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.793326] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f078738-580a-4470-8f37-82f98b11e462 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.803390] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea35a29e-1a55-4ef4-b407-06f39f94fedd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2489.817158] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2489.825940] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2489.842083] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2489.842277] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.371s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2490.534782] env[61964]: DEBUG oslo_concurrency.lockutils [None 
req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "841fd145-2c83-46a5-be0e-d0c6de409f67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2490.535145] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2491.836624] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2507.291747] env[61964]: WARNING oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2507.291747] env[61964]: ERROR oslo_vmware.rw_handles [ 2507.292467] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2507.294169] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Caching image {{(pid=61964) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2507.294451] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Copying Virtual Disk [datastore1] vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/de13d0cf-41df-4dda-bb7d-4dc8cc9ad2a7/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2507.294796] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c46036aa-d775-4920-9b0f-4d84486c2010 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.303233] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2507.303233] env[61964]: value = "task-1688746" [ 2507.303233] env[61964]: _type = "Task" [ 2507.303233] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.310731] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': task-1688746, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2507.813660] env[61964]: DEBUG oslo_vmware.exceptions [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2507.813942] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2507.814521] env[61964]: ERROR nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2507.814521] env[61964]: Faults: ['InvalidArgument'] [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Traceback (most recent call last): [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] yield resources [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self.driver.spawn(context, instance, image_meta, [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self._fetch_image_if_missing(context, vi) [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] image_cache(vi, tmp_image_ds_loc) [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] vm_util.copy_virtual_disk( [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] session._wait_for_task(vmdk_copy_task) [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] return self.wait_for_task(task_ref) [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] return evt.wait() [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] result = hub.switch() [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] return self.greenlet.switch() [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self.f(*self.args, **self.kw) [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] raise exceptions.translate_fault(task_info.error) [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Faults: ['InvalidArgument'] [ 2507.814521] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] [ 2507.815698] env[61964]: INFO nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Terminating instance [ 2507.816355] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2507.816555] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2507.816786] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cac78d6a-001e-40d2-a086-ec0daa80d5be {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.819046] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2507.819237] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2507.819924] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07afd1f-5bbb-4e6a-9f6e-8442a0149380 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.826671] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2507.826867] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a574f9f-f35f-4b47-bb18-a3ae69111da4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.828867] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2507.829048] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2507.829936] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0078b939-94fd-430e-96e0-9901c79c1354 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2507.835920] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 2507.835920] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52960374-7b31-f393-596f-f8649a3ddb91" [ 2507.835920] env[61964]: _type = "Task" [ 2507.835920] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2507.843138] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52960374-7b31-f393-596f-f8649a3ddb91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.105389] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2508.105625] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2508.105803] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Deleting the datastore file [datastore1] 9ae01818-da08-4137-97c0-bc4c57759d46 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2508.106083] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37c1f644-dc10-45a6-86bf-50b5b6ba0c4e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.112492] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2508.112492] env[61964]: value = "task-1688748" [ 2508.112492] env[61964]: _type = "Task" [ 2508.112492] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2508.120250] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': task-1688748, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.346404] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2508.346723] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating directory with path [datastore1] vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2508.346894] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af26faa8-7b05-443c-8b90-6ff3fbb3c028 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.357407] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Created directory with path [datastore1] vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2508.357542] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Fetch image to [datastore1] vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2508.357705] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2508.358404] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18dfc36-4412-4a27-b95b-e43ba6304d1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.364918] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551b2830-88b1-4d5a-9ee5-6540a46fe90c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.373645] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401a4dac-2ce9-4e07-847d-3487e35c8cf2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.403063] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7881dc97-e1f9-4ddb-b2d0-c37557fc4254 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.408256] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5d703194-f578-4243-b135-6709728df7fb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.428283] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2508.477972] env[61964]: DEBUG oslo_vmware.rw_handles [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2508.537952] env[61964]: DEBUG oslo_vmware.rw_handles [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2508.538239] env[61964]: DEBUG oslo_vmware.rw_handles [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2508.623535] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': task-1688748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085349} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2508.623535] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2508.623535] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2508.623535] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2508.623856] env[61964]: INFO nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Took 0.80 seconds to destroy the instance on the hypervisor. [ 2508.627295] env[61964]: DEBUG nova.compute.claims [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2508.627467] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2508.627679] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2508.807134] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0088a39c-e06c-47de-b365-1d87bf20dda0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.816248] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a1ed03-3a4c-478a-9d9c-aabdc4f5e141 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.845351] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d90596d-cd12-46cc-a528-fe0b1c2be526 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.852847] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-03441d19-5908-4d32-a46e-4b10ca5682dd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.865936] env[61964]: DEBUG nova.compute.provider_tree [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2508.876248] env[61964]: DEBUG nova.scheduler.client.report [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2508.890537] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.263s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2508.891071] env[61964]: ERROR nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2508.891071] env[61964]: Faults: ['InvalidArgument'] [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Traceback (most recent call last): [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self.driver.spawn(context, instance, image_meta, [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self._fetch_image_if_missing(context, vi) [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 
9ae01818-da08-4137-97c0-bc4c57759d46] image_cache(vi, tmp_image_ds_loc) [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] vm_util.copy_virtual_disk( [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] session._wait_for_task(vmdk_copy_task) [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] return self.wait_for_task(task_ref) [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] return evt.wait() [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] result = hub.switch() [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] return self.greenlet.switch() [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] self.f(*self.args, **self.kw) [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] raise exceptions.translate_fault(task_info.error) [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Faults: ['InvalidArgument'] [ 2508.891071] env[61964]: ERROR nova.compute.manager [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] [ 2508.892025] env[61964]: DEBUG nova.compute.utils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2508.893221] env[61964]: DEBUG 
nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Build of instance 9ae01818-da08-4137-97c0-bc4c57759d46 was re-scheduled: A specified parameter was not correct: fileType [ 2508.893221] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2508.893604] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2508.893772] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2508.893939] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2508.894124] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2509.152238] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2509.164615] env[61964]: INFO nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Took 0.27 seconds to deallocate network for instance. 
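The inventory payload in the "Inventory has not changed" entries above is what placement uses to bound allocations: for each resource class the schedulable capacity is (total - reserved) * allocation_ratio. The figures also reconcile with the final resource view reported earlier in this period: used_ram of 1792 MB is the 512 MB reserved plus ten 128 MB (m1.nano) allocations, and used_disk of 10 GB is ten 1 GB root disks. A minimal illustrative sketch of that arithmetic, using the logged values (the helper below is made up for illustration and is not Nova or placement code):

    # Derive schedulable capacity from the inventory payload logged by
    # nova.scheduler.client.report above. Illustrative only.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Placement allows usage up to (total - reserved) * allocation_ratio
        # for each resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
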
[ 2509.258916] env[61964]: INFO nova.scheduler.client.report [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Deleted allocations for instance 9ae01818-da08-4137-97c0-bc4c57759d46 [ 2509.279656] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9ae01818-da08-4137-97c0-bc4c57759d46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 453.337s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2509.280769] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9ae01818-da08-4137-97c0-bc4c57759d46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 257.083s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2509.280995] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "9ae01818-da08-4137-97c0-bc4c57759d46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2509.281319] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9ae01818-da08-4137-97c0-bc4c57759d46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2509.281493] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9ae01818-da08-4137-97c0-bc4c57759d46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2509.283561] env[61964]: INFO nova.compute.manager [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Terminating instance [ 2509.285350] env[61964]: DEBUG nova.compute.manager [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2509.285545] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2509.286018] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52c781c3-3a01-4cf2-a46e-bb5015094ea5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.295050] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03c8535-e4b0-43dc-b1d9-b9a5a6364f5b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.306936] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2509.326823] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9ae01818-da08-4137-97c0-bc4c57759d46 could not be found. [ 2509.327017] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2509.327195] env[61964]: INFO nova.compute.manager [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2509.327440] env[61964]: DEBUG oslo.service.loopingcall [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2509.327655] env[61964]: DEBUG nova.compute.manager [-] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2509.327752] env[61964]: DEBUG nova.network.neutron [-] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2509.350036] env[61964]: DEBUG nova.network.neutron [-] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2509.357200] env[61964]: INFO nova.compute.manager [-] [instance: 9ae01818-da08-4137-97c0-bc4c57759d46] Took 0.03 seconds to deallocate network for instance. [ 2509.358101] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2509.358335] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2509.359855] env[61964]: INFO nova.compute.claims [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2509.454941] env[61964]: DEBUG oslo_concurrency.lockutils [None req-3f5d1ec7-d5d2-431c-a558-cfa5477d2357 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "9ae01818-da08-4137-97c0-bc4c57759d46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2509.536364] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77337fb-a4db-4c24-833b-524bde91424a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.544094] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7c82b7-9e19-4d5a-914a-d042acdfd5f6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.573187] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36eae00b-acf4-4d0d-9030-2a839d660cc0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.580653] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6570b3fd-002f-430a-886a-08a826bbb6cf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.595518] env[61964]: DEBUG nova.compute.provider_tree [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2509.604611] env[61964]: DEBUG nova.scheduler.client.report [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2509.617593] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2509.618009] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2509.655854] env[61964]: DEBUG nova.compute.utils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2509.657642] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2509.657642] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2509.666750] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2509.734033] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2509.736186] env[61964]: DEBUG nova.policy [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c72bf2e6548a4412909523ac7bb404d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b25939891d8149968749f7e267cc4209', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2509.758579] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2509.758822] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2509.758982] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2509.759184] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2509.759324] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2509.759470] env[61964]: DEBUG nova.virt.hardware [None 
req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2509.759674] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2509.759850] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2509.759997] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2509.760202] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2509.760373] env[61964]: DEBUG nova.virt.hardware [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2509.761239] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7a5c17-cb93-4794-b65a-033a95390fdc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.769104] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00331d6c-0fc3-4e79-8e43-36caa03d0605 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.057334] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Successfully created port: 36efc31b-ad66-4809-9eab-27e08ebdb976 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2510.783132] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Successfully updated port: 36efc31b-ad66-4809-9eab-27e08ebdb976 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2510.796698] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 
tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "refresh_cache-04451950-2e85-46dd-a516-6b7743e03f7d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2510.796856] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquired lock "refresh_cache-04451950-2e85-46dd-a516-6b7743e03f7d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2510.796994] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2510.842758] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2511.029697] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Updating instance_info_cache with network_info: [{"id": "36efc31b-ad66-4809-9eab-27e08ebdb976", "address": "fa:16:3e:74:7d:bb", "network": {"id": "fdd83641-8d04-452e-bd90-580dd6f90138", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-515871203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b25939891d8149968749f7e267cc4209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36efc31b-ad", "ovs_interfaceid": "36efc31b-ad66-4809-9eab-27e08ebdb976", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2511.043804] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Releasing lock "refresh_cache-04451950-2e85-46dd-a516-6b7743e03f7d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2511.044107] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] 
Instance network_info: |[{"id": "36efc31b-ad66-4809-9eab-27e08ebdb976", "address": "fa:16:3e:74:7d:bb", "network": {"id": "fdd83641-8d04-452e-bd90-580dd6f90138", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-515871203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b25939891d8149968749f7e267cc4209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36efc31b-ad", "ovs_interfaceid": "36efc31b-ad66-4809-9eab-27e08ebdb976", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2511.044501] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:7d:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36efc31b-ad66-4809-9eab-27e08ebdb976', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2511.052069] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Creating folder: Project (b25939891d8149968749f7e267cc4209). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2511.052660] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46acccb6-bf4c-4df0-85ef-2b49dfc61fa8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.063903] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Created folder: Project (b25939891d8149968749f7e267cc4209) in parent group-v351942. [ 2511.064100] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Creating folder: Instances. Parent ref: group-v352041. 
{{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2511.064333] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8b82396-f809-4c42-bf71-36bfffab4f87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.073021] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Created folder: Instances in parent group-v352041. [ 2511.073283] env[61964]: DEBUG oslo.service.loopingcall [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2511.073431] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2511.073632] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ec9e4e5-bf7c-4ae4-ba07-c916091f8cb0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.093027] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2511.093027] env[61964]: value = "task-1688751" [ 2511.093027] env[61964]: _type = "Task" [ 2511.093027] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.100626] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688751, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2511.193065] env[61964]: DEBUG nova.compute.manager [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Received event network-vif-plugged-36efc31b-ad66-4809-9eab-27e08ebdb976 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2511.193328] env[61964]: DEBUG oslo_concurrency.lockutils [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] Acquiring lock "04451950-2e85-46dd-a516-6b7743e03f7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2511.193591] env[61964]: DEBUG oslo_concurrency.lockutils [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] Lock "04451950-2e85-46dd-a516-6b7743e03f7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2511.193789] env[61964]: DEBUG oslo_concurrency.lockutils [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] Lock "04451950-2e85-46dd-a516-6b7743e03f7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2511.193971] env[61964]: DEBUG nova.compute.manager [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] No waiting events found dispatching network-vif-plugged-36efc31b-ad66-4809-9eab-27e08ebdb976 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2511.194182] env[61964]: WARNING nova.compute.manager [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Received unexpected event network-vif-plugged-36efc31b-ad66-4809-9eab-27e08ebdb976 for instance with vm_state building and task_state spawning. [ 2511.194343] env[61964]: DEBUG nova.compute.manager [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Received event network-changed-36efc31b-ad66-4809-9eab-27e08ebdb976 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2511.194494] env[61964]: DEBUG nova.compute.manager [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Refreshing instance network info cache due to event network-changed-36efc31b-ad66-4809-9eab-27e08ebdb976.
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2511.194811] env[61964]: DEBUG oslo_concurrency.lockutils [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] Acquiring lock "refresh_cache-04451950-2e85-46dd-a516-6b7743e03f7d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2511.195037] env[61964]: DEBUG oslo_concurrency.lockutils [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] Acquired lock "refresh_cache-04451950-2e85-46dd-a516-6b7743e03f7d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2511.195217] env[61964]: DEBUG nova.network.neutron [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Refreshing network info cache for port 36efc31b-ad66-4809-9eab-27e08ebdb976 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2511.602993] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688751, 'name': CreateVM_Task, 'duration_secs': 0.322364} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2511.603252] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2511.603916] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2511.604107] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2511.604424] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2511.604698] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34a9dff9-354f-4804-b71e-a7cdc7dcca5b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.609020] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Waiting for the task: (returnval){ [ 2511.609020] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e6ba12-a9a0-3b1b-7a66-4fd63371ca25" [ 2511.609020] env[61964]: _type = "Task" [ 2511.609020] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.616722] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e6ba12-a9a0-3b1b-7a66-4fd63371ca25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2511.736137] env[61964]: DEBUG nova.network.neutron [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Updated VIF entry in instance network info cache for port 36efc31b-ad66-4809-9eab-27e08ebdb976. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2511.736498] env[61964]: DEBUG nova.network.neutron [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Updating instance_info_cache with network_info: [{"id": "36efc31b-ad66-4809-9eab-27e08ebdb976", "address": "fa:16:3e:74:7d:bb", "network": {"id": "fdd83641-8d04-452e-bd90-580dd6f90138", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-515871203-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b25939891d8149968749f7e267cc4209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36efc31b-ad", "ovs_interfaceid": "36efc31b-ad66-4809-9eab-27e08ebdb976", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2511.747029] env[61964]: DEBUG oslo_concurrency.lockutils [req-63051ab4-32a9-4db0-a683-f507d7822793 req-6cda4879-b27e-44f8-b4ad-05aa055eac27 service nova] Releasing lock "refresh_cache-04451950-2e85-46dd-a516-6b7743e03f7d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2512.118924] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2512.119222] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2512.119408] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2536.384138] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2536.384513] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2536.384513] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2536.412807] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.412991] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.413451] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.413605] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.413735] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.413861] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.413984] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.414303] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.414444] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.414565] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2536.414686] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2536.415226] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2539.384574] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2541.384558] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2541.384857] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2545.384405] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2545.384733] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2547.379472] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2547.383080] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2550.384133] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2550.396227] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2550.396450] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2550.396616] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2550.396769] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2550.398248] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4363a3d3-c86b-4e20-b07d-c2d819e8692f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.406750] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d71844-2cdd-4d2d-991e-ababe9030148 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.420769] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f93d224-778d-426d-a7a8-b38157291963 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.427175] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b77857-7dfe-4e81-8366-482f9b6d6b1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.457455] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181381MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2550.457455] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2550.457616] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2550.539082] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.539257] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.539385] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.539506] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f5589fbe-df43-4407-b63a-5e4f96021b61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.539624] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.539748] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.539854] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.539969] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.540101] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.540219] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2550.551228] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2550.551451] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2550.551598] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2550.673715] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f48e31b-77b1-4003-826a-37c6964e65e2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.680974] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172c03d9-3be3-4f13-bde7-43ab6a202f82 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.709733] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0f5945-9dd3-4500-b88e-74f9f7886587 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.716131] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c1a563-96ca-4ab6-8e93-9dc1389f5c1d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2550.728427] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2550.736757] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2550.751232] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2550.751420] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.294s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2557.309230] env[61964]: WARNING oslo_vmware.rw_handles [None 
req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2557.309230] env[61964]: ERROR oslo_vmware.rw_handles [ 2557.309962] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2557.311667] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2557.311923] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Copying Virtual Disk [datastore1] vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/1e3f0136-9fbd-4b4a-ba39-08a80cd847a4/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2557.312259] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99af3bcf-b68a-460c-8670-053d9cebfa49 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.319883] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 2557.319883] env[61964]: value = "task-1688752" [ 2557.319883] env[61964]: _type = "Task" [ 2557.319883] env[61964]: } to 
complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2557.327185] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': task-1688752, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2557.829889] env[61964]: DEBUG oslo_vmware.exceptions [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2557.830204] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2557.830735] env[61964]: ERROR nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2557.830735] env[61964]: Faults: ['InvalidArgument'] [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Traceback (most recent call last): [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] yield resources [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self.driver.spawn(context, instance, image_meta, [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self._fetch_image_if_missing(context, vi) [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] image_cache(vi, tmp_image_ds_loc) [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 
4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] vm_util.copy_virtual_disk( [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] session._wait_for_task(vmdk_copy_task) [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] return self.wait_for_task(task_ref) [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] return evt.wait() [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] result = hub.switch() [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] return self.greenlet.switch() [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self.f(*self.args, **self.kw) [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] raise exceptions.translate_fault(task_info.error) [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Faults: ['InvalidArgument'] [ 2557.830735] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] [ 2557.831807] env[61964]: INFO nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Terminating instance [ 2557.832556] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2557.832758] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2557.832993] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccfb02cc-d7a7-4422-86cc-3bcab7418694 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.835074] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2557.835254] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2557.835939] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda0f921-7ed0-4629-8a7f-27470ad09887 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.842289] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2557.842490] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c72e6b5d-d2e1-4ba7-a762-34852cc6d80e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.844527] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2557.844698] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2557.845617] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82799a90-0ab0-43fe-b369-16062ace5683 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.849953] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2557.849953] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52077dc2-9d51-d35d-5729-74992ba95d27" [ 2557.849953] env[61964]: _type = "Task" [ 2557.849953] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2557.856932] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52077dc2-9d51-d35d-5729-74992ba95d27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2557.969379] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2557.969594] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2557.969769] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Deleting the datastore file [datastore1] 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2557.970047] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36073344-18bf-4a15-8c5c-21d1c082446a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.975623] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for the task: (returnval){ [ 2557.975623] env[61964]: value = "task-1688754" [ 2557.975623] env[61964]: _type = "Task" [ 2557.975623] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2557.982772] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': task-1688754, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2558.360738] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2558.361049] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating directory with path [datastore1] vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2558.361241] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85e5d8fc-9e4e-4eb9-b10a-dffc596fc6af {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.372293] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Created directory with path [datastore1] vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2558.372482] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Fetch image to [datastore1] vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2558.372652] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2558.373397] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da837131-26dd-4ca8-aa44-b3b13dc1af8d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.379469] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81d91af-eea0-4428-890e-d3bbe6bee71b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.388185] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c012cf0-da10-4f35-a24f-74018205b5fa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.418778] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578e95b6-ac54-4123-b71c-514d95bce06e {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.423960] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-90d891c7-f4c3-4c6f-82db-b0ba4d7d52c1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.444554] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2558.484341] env[61964]: DEBUG oslo_vmware.api [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Task: {'id': task-1688754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063975} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2558.484571] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2558.484744] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2558.485935] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2558.485935] env[61964]: INFO nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Took 0.65 seconds to destroy the instance on the hypervisor. 
[ 2558.487175] env[61964]: DEBUG nova.compute.claims [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2558.487350] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2558.487564] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2558.494017] env[61964]: DEBUG oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2558.554227] env[61964]: DEBUG oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2558.554309] env[61964]: DEBUG oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2558.680670] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7265f8a-1213-4bff-9e3a-5f5124bf138c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.688734] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5346f2-3633-487a-b890-2e19660ae521 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.718223] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14baeebe-0352-42df-8de6-07e6f733dbcf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.725126] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe00ccea-eb86-48b3-92f2-8210bc590552 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2558.737689] env[61964]: DEBUG nova.compute.provider_tree [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2558.746136] env[61964]: DEBUG nova.scheduler.client.report [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2558.760037] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.272s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2558.760414] env[61964]: ERROR nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2558.760414] env[61964]: Faults: ['InvalidArgument'] [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Traceback (most recent call last): [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2558.760414] 
env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self.driver.spawn(context, instance, image_meta, [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self._fetch_image_if_missing(context, vi) [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] image_cache(vi, tmp_image_ds_loc) [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] vm_util.copy_virtual_disk( [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] session._wait_for_task(vmdk_copy_task) [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] return self.wait_for_task(task_ref) [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] return evt.wait() [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] result = hub.switch() [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] return self.greenlet.switch() [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] self.f(*self.args, **self.kw) [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] raise exceptions.translate_fault(task_info.error) [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Faults: ['InvalidArgument'] [ 2558.760414] env[61964]: ERROR nova.compute.manager [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] [ 2558.761470] env[61964]: DEBUG nova.compute.utils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2558.762456] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Build of instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 was re-scheduled: A specified parameter was not correct: fileType [ 2558.762456] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2558.762814] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2558.762983] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2558.763166] env[61964]: DEBUG nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2558.763329] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2559.096400] env[61964]: DEBUG nova.network.neutron [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2559.108634] env[61964]: INFO nova.compute.manager [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Took 0.35 seconds to deallocate network for instance. [ 2559.201031] env[61964]: INFO nova.scheduler.client.report [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Deleted allocations for instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 [ 2559.224500] env[61964]: DEBUG oslo_concurrency.lockutils [None req-04adfb8c-aa41-459e-8e62-e206fe84e6a9 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 497.715s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2559.225921] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 301.826s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2559.226227] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Acquiring lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2559.226316] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2559.226516] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2559.228305] env[61964]: INFO nova.compute.manager [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Terminating instance [ 2559.230362] env[61964]: DEBUG nova.compute.manager [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2559.230556] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2559.230814] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f580cbaa-d96f-4a7b-bc4a-35a07b00ac95 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.241486] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936d2bd3-1a28-4d9e-acdf-e9b809a1b5f6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.253604] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2559.274149] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c21315f-18a1-4cc4-b4b7-cfb07b06c379 could not be found. 
[ 2559.274390] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2559.274544] env[61964]: INFO nova.compute.manager [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2559.274931] env[61964]: DEBUG oslo.service.loopingcall [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2559.275042] env[61964]: DEBUG nova.compute.manager [-] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2559.275212] env[61964]: DEBUG nova.network.neutron [-] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2559.302179] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2559.302450] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2559.303996] env[61964]: INFO nova.compute.claims [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2559.313871] env[61964]: DEBUG nova.network.neutron [-] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2559.350635] env[61964]: INFO nova.compute.manager [-] [instance: 4c21315f-18a1-4cc4-b4b7-cfb07b06c379] Took 0.08 seconds to deallocate network for instance. 
[ 2559.446750] env[61964]: DEBUG oslo_concurrency.lockutils [None req-9c48fd33-ae25-43d5-a0b6-e824bbc01083 tempest-AttachInterfacesTestJSON-791539478 tempest-AttachInterfacesTestJSON-791539478-project-member] Lock "4c21315f-18a1-4cc4-b4b7-cfb07b06c379" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2559.507390] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1870e0-3d98-4e36-96a9-4ff96fec7275 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.514930] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a56889-cd76-4b0f-9684-1dc880abcd14 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.545788] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d5a899-0ae2-46ad-8ec8-d076f1fcc025 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.552750] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4c7772-84cb-4cfb-8754-82f7f49596ee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.566392] env[61964]: DEBUG nova.compute.provider_tree [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2559.576105] env[61964]: DEBUG nova.scheduler.client.report [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2559.591169] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.289s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2559.591685] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2559.629522] env[61964]: DEBUG nova.compute.utils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2559.630768] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2559.630927] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2559.639049] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2559.703289] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2559.730123] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2559.730384] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2559.730539] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2559.730713] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2559.730856] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2559.730997] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2559.731225] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2559.731384] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2559.731547] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2559.731704] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2559.731872] env[61964]: DEBUG nova.virt.hardware [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2559.732754] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d8112e-decf-43c3-a22e-01dcd1d22fde {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.741182] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a7797c-2e4e-4229-b4e9-e5d326b0334c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.953756] env[61964]: DEBUG nova.policy [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd85799edb52540a9841505de3c5da6f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59626e94ae3944c8863fe312dd97d9bb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2560.349044] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Successfully created port: b11bb420-a728-4d50-bbdf-b67ebd256f89 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2561.128103] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Successfully updated port: b11bb420-a728-4d50-bbdf-b67ebd256f89 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2561.139275] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "refresh_cache-841fd145-2c83-46a5-be0e-d0c6de409f67" {{(pid=61964) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2561.139275] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired lock "refresh_cache-841fd145-2c83-46a5-be0e-d0c6de409f67" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2561.139275] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2561.167670] env[61964]: DEBUG nova.compute.manager [req-cb4d8d99-ca10-4eb0-bdb4-ece51fb5aa71 req-b11133a8-dac4-44f4-a6f4-4741a74d52d4 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Received event network-vif-plugged-b11bb420-a728-4d50-bbdf-b67ebd256f89 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2561.167670] env[61964]: DEBUG oslo_concurrency.lockutils [req-cb4d8d99-ca10-4eb0-bdb4-ece51fb5aa71 req-b11133a8-dac4-44f4-a6f4-4741a74d52d4 service nova] Acquiring lock "841fd145-2c83-46a5-be0e-d0c6de409f67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2561.167670] env[61964]: DEBUG oslo_concurrency.lockutils [req-cb4d8d99-ca10-4eb0-bdb4-ece51fb5aa71 req-b11133a8-dac4-44f4-a6f4-4741a74d52d4 service nova] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2561.168249] env[61964]: DEBUG oslo_concurrency.lockutils [req-cb4d8d99-ca10-4eb0-bdb4-ece51fb5aa71 req-b11133a8-dac4-44f4-a6f4-4741a74d52d4 service nova] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2561.169167] env[61964]: DEBUG nova.compute.manager [req-cb4d8d99-ca10-4eb0-bdb4-ece51fb5aa71 req-b11133a8-dac4-44f4-a6f4-4741a74d52d4 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] No waiting events found dispatching network-vif-plugged-b11bb420-a728-4d50-bbdf-b67ebd256f89 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2561.169167] env[61964]: WARNING nova.compute.manager [req-cb4d8d99-ca10-4eb0-bdb4-ece51fb5aa71 req-b11133a8-dac4-44f4-a6f4-4741a74d52d4 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Received unexpected event network-vif-plugged-b11bb420-a728-4d50-bbdf-b67ebd256f89 for instance with vm_state building and task_state spawning. [ 2561.183233] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2561.350308] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Updating instance_info_cache with network_info: [{"id": "b11bb420-a728-4d50-bbdf-b67ebd256f89", "address": "fa:16:3e:9c:66:4b", "network": {"id": "18784f4f-4f78-49de-bf0f-6241674cfdcd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1996452367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59626e94ae3944c8863fe312dd97d9bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb11bb420-a7", "ovs_interfaceid": "b11bb420-a728-4d50-bbdf-b67ebd256f89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2561.364844] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Releasing lock "refresh_cache-841fd145-2c83-46a5-be0e-d0c6de409f67" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2561.365216] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Instance network_info: |[{"id": "b11bb420-a728-4d50-bbdf-b67ebd256f89", "address": "fa:16:3e:9c:66:4b", "network": {"id": "18784f4f-4f78-49de-bf0f-6241674cfdcd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1996452367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59626e94ae3944c8863fe312dd97d9bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb11bb420-a7", "ovs_interfaceid": "b11bb420-a728-4d50-bbdf-b67ebd256f89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2561.365604] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:66:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b11bb420-a728-4d50-bbdf-b67ebd256f89', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2561.373407] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating folder: Project (59626e94ae3944c8863fe312dd97d9bb). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2561.373926] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d7c320d-10b3-4d87-a6fd-ef228b7ecb69 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.384606] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Created folder: Project (59626e94ae3944c8863fe312dd97d9bb) in parent group-v351942. [ 2561.384781] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating folder: Instances. Parent ref: group-v352044. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2561.385011] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2cc9e48-b6e3-439f-9ce7-75cfe3341e2d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.394029] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Created folder: Instances in parent group-v352044. [ 2561.394120] env[61964]: DEBUG oslo.service.loopingcall [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2561.394520] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2561.394520] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8acc566d-1f18-40b5-b030-2237e7e015f8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.414090] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2561.414090] env[61964]: value = "task-1688757" [ 2561.414090] env[61964]: _type = "Task" [ 2561.414090] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2561.421612] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688757, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2561.923979] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688757, 'name': CreateVM_Task, 'duration_secs': 0.382988} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2561.924277] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2561.925042] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2561.925305] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2561.925690] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2561.926009] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf3ac2c9-963b-44dd-9481-4c50158664c9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.930311] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 2561.930311] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528fe055-ca47-0713-58e8-90fa944f1a50" [ 2561.930311] env[61964]: _type = "Task" [ 2561.930311] env[61964]: } to 
complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2561.937968] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528fe055-ca47-0713-58e8-90fa944f1a50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2562.441013] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2562.441375] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2562.441519] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2563.198022] env[61964]: DEBUG nova.compute.manager [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Received event network-changed-b11bb420-a728-4d50-bbdf-b67ebd256f89 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2563.198231] env[61964]: DEBUG nova.compute.manager [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Refreshing instance network info cache due to event network-changed-b11bb420-a728-4d50-bbdf-b67ebd256f89. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2563.198438] env[61964]: DEBUG oslo_concurrency.lockutils [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] Acquiring lock "refresh_cache-841fd145-2c83-46a5-be0e-d0c6de409f67" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2563.198584] env[61964]: DEBUG oslo_concurrency.lockutils [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] Acquired lock "refresh_cache-841fd145-2c83-46a5-be0e-d0c6de409f67" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2563.198799] env[61964]: DEBUG nova.network.neutron [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Refreshing network info cache for port b11bb420-a728-4d50-bbdf-b67ebd256f89 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2563.501921] env[61964]: DEBUG nova.network.neutron [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Updated VIF entry in instance network info cache for port b11bb420-a728-4d50-bbdf-b67ebd256f89. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2563.502333] env[61964]: DEBUG nova.network.neutron [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Updating instance_info_cache with network_info: [{"id": "b11bb420-a728-4d50-bbdf-b67ebd256f89", "address": "fa:16:3e:9c:66:4b", "network": {"id": "18784f4f-4f78-49de-bf0f-6241674cfdcd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1996452367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59626e94ae3944c8863fe312dd97d9bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb11bb420-a7", "ovs_interfaceid": "b11bb420-a728-4d50-bbdf-b67ebd256f89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2563.512482] env[61964]: DEBUG oslo_concurrency.lockutils [req-b67f96cc-9218-4f7b-801b-19d8c206b9aa req-59818246-a37c-4e57-8712-2b2471d0d857 service nova] Releasing lock "refresh_cache-841fd145-2c83-46a5-be0e-d0c6de409f67" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2567.747239] env[61964]: DEBUG oslo_concurrency.lockutils [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 
tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2596.752052] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2596.752052] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2596.752402] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2596.776312] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777028] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777028] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777028] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777028] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777028] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777422] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777422] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777422] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777535] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2596.777587] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2597.383441] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2599.384231] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2601.384290] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2601.384716] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2602.565429] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2602.565951] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Getting list of instances from cluster (obj){ [ 2602.565951] env[61964]: value = "domain-c8" [ 2602.565951] env[61964]: _type = "ClusterComputeResource" [ 2602.565951] env[61964]: } {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2602.567416] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fce49f0-fdb2-49ec-bf91-e9457d49d102 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2602.584741] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Got total of 10 instances {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2604.592597] env[61964]: WARNING oslo_vmware.rw_handles [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2604.592597] env[61964]: ERROR oslo_vmware.rw_handles [ 2604.593386] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2604.595147] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: 
bae8f345-41e7-4adb-a44c-d91347fb4c7a] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2604.595394] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Copying Virtual Disk [datastore1] vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/bec09ca6-671b-4804-a76e-8811a7b2996e/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2604.595723] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef3f919d-2578-482b-b215-e001eb4274fe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2604.603527] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2604.603527] env[61964]: value = "task-1688758" [ 2604.603527] env[61964]: _type = "Task" [ 2604.603527] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2604.611587] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': task-1688758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2605.114065] env[61964]: DEBUG oslo_vmware.exceptions [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2605.114065] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2605.114365] env[61964]: ERROR nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2605.114365] env[61964]: Faults: ['InvalidArgument'] [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Traceback (most recent call last): [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] yield resources [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self.driver.spawn(context, instance, image_meta, [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self._fetch_image_if_missing(context, vi) [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] image_cache(vi, tmp_image_ds_loc) [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] vm_util.copy_virtual_disk( [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] session._wait_for_task(vmdk_copy_task) [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] return self.wait_for_task(task_ref) [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] return evt.wait() [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] result = hub.switch() [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] return self.greenlet.switch() [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self.f(*self.args, **self.kw) [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] raise exceptions.translate_fault(task_info.error) [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Faults: ['InvalidArgument'] [ 2605.114365] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] [ 2605.115528] env[61964]: INFO nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Terminating instance [ 2605.116224] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2605.116434] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2605.116694] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c18d94b6-cc96-4fcd-a100-1cc090d6c693 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.118776] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2605.118973] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2605.119671] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c97c40-1408-43cc-ba0f-bd79515913ac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.126248] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2605.126446] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17e5e92b-405a-43c0-8190-04b2aed8831a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.128424] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2605.128597] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2605.129500] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52042c9a-0bba-405f-977b-26cd49d5804f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.134262] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Waiting for the task: (returnval){ [ 2605.134262] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f0ea11-6c2e-05e2-c8d3-42a61752b56f" [ 2605.134262] env[61964]: _type = "Task" [ 2605.134262] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2605.141035] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f0ea11-6c2e-05e2-c8d3-42a61752b56f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2605.197061] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2605.197254] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2605.197434] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Deleting the datastore file [datastore1] bae8f345-41e7-4adb-a44c-d91347fb4c7a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2605.197694] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-feb4ac11-c2fc-46f6-bbf1-290e1b03f82a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.203747] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for the task: (returnval){ [ 2605.203747] env[61964]: value = "task-1688760" [ 2605.203747] env[61964]: _type = "Task" [ 2605.203747] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2605.211280] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': task-1688760, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2605.420194] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2605.420478] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2605.644262] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2605.644544] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Creating directory with path [datastore1] vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2605.644788] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b25475f7-c0af-4b59-b8f2-82cf781238ee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.657098] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Created directory with path [datastore1] vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2605.657288] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Fetch image to [datastore1] vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2605.657454] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2605.658206] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923389d1-0081-44df-99e2-9ccc20941442 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.664641] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9454a220-5f82-4b3b-b0dd-12b03184584a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.674352] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cab27d-c40b-40e1-98bb-42d9b68cb028 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.708370] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a8faa3-9c0a-4e43-9b75-7b7cf40ae11e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.715350] env[61964]: DEBUG oslo_vmware.api [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Task: {'id': task-1688760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072464} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2605.716769] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2605.716961] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2605.717150] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2605.717323] env[61964]: INFO nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2605.719082] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a9b83baa-8814-4fdb-8fcd-e3148bffdf4e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.720907] env[61964]: DEBUG nova.compute.claims [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2605.721099] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2605.721324] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2605.741757] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2605.796959] env[61964]: DEBUG oslo_vmware.rw_handles [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2605.857305] env[61964]: DEBUG oslo_vmware.rw_handles [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2605.857507] env[61964]: DEBUG oslo_vmware.rw_handles [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2605.945790] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd15712-921c-4642-b375-29515c218d63 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.953045] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959069f0-9e5d-4419-b8ea-eb36fd69cd9b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.983183] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2abc0c0-ba04-4d43-9557-5598ff7400d7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2605.990254] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c5e9ba-4750-47cf-a06b-4134dd77bc55 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2606.003192] env[61964]: DEBUG nova.compute.provider_tree [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2606.011261] env[61964]: DEBUG nova.scheduler.client.report [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2606.027384] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.306s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2606.027907] env[61964]: ERROR nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2606.027907] env[61964]: Faults: ['InvalidArgument'] [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Traceback (most recent call last): [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2606.027907] env[61964]: ERROR 
nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self.driver.spawn(context, instance, image_meta, [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self._fetch_image_if_missing(context, vi) [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] image_cache(vi, tmp_image_ds_loc) [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] vm_util.copy_virtual_disk( [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] session._wait_for_task(vmdk_copy_task) [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] return self.wait_for_task(task_ref) [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] return evt.wait() [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] result = hub.switch() [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] return self.greenlet.switch() [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] self.f(*self.args, **self.kw) [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] raise exceptions.translate_fault(task_info.error) [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Faults: ['InvalidArgument'] [ 2606.027907] env[61964]: ERROR nova.compute.manager [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] [ 2606.028770] env[61964]: DEBUG nova.compute.utils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2606.030015] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Build of instance bae8f345-41e7-4adb-a44c-d91347fb4c7a was re-scheduled: A specified parameter was not correct: fileType [ 2606.030015] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2606.030404] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2606.030566] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2606.030734] env[61964]: DEBUG nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2606.030893] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2606.332716] env[61964]: DEBUG nova.network.neutron [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2606.342890] env[61964]: INFO nova.compute.manager [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Took 0.31 seconds to deallocate network for instance. [ 2606.515394] env[61964]: INFO nova.scheduler.client.report [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Deleted allocations for instance bae8f345-41e7-4adb-a44c-d91347fb4c7a [ 2606.538969] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f4cd3fbf-67a5-415e-a1c7-534ee910eada tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 550.565s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2606.539215] env[61964]: DEBUG oslo_concurrency.lockutils [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 354.412s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2606.539425] env[61964]: DEBUG oslo_concurrency.lockutils [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Acquiring lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2606.539624] env[61964]: DEBUG oslo_concurrency.lockutils [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2606.539790] env[61964]: DEBUG oslo_concurrency.lockutils [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2606.541641] env[61964]: INFO nova.compute.manager [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Terminating instance [ 2606.543332] env[61964]: DEBUG nova.compute.manager [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2606.543520] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2606.543974] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf9562ad-a112-450a-b2b8-6c010f6960a7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2606.553714] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9e9e44-3411-441a-a5ce-0b956ea685cb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2606.580995] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bae8f345-41e7-4adb-a44c-d91347fb4c7a could not be found. [ 2606.581214] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2606.581387] env[61964]: INFO nova.compute.manager [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2606.581624] env[61964]: DEBUG oslo.service.loopingcall [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2606.581854] env[61964]: DEBUG nova.compute.manager [-] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2606.581947] env[61964]: DEBUG nova.network.neutron [-] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2606.604725] env[61964]: DEBUG nova.network.neutron [-] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2606.612500] env[61964]: INFO nova.compute.manager [-] [instance: bae8f345-41e7-4adb-a44c-d91347fb4c7a] Took 0.03 seconds to deallocate network for instance. [ 2606.707305] env[61964]: DEBUG oslo_concurrency.lockutils [None req-29a3c408-7ae0-4be2-83b8-1fd617c34628 tempest-MultipleCreateTestJSON-85934861 tempest-MultipleCreateTestJSON-85934861-project-member] Lock "bae8f345-41e7-4adb-a44c-d91347fb4c7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2608.384262] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2608.384543] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2608.384658] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2608.394451] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] There are 0 instances to clean {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2608.394650] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2608.394785] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61964) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2609.397867] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2610.384213] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2610.395768] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2610.396044] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2610.396249] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2610.396436] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2610.397550] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f592dd-90d3-471d-aa36-08609f09096a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.406493] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f0e083-15a1-45d4-ab94-502ffdf7804d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.420228] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de20eb3-c329-4f74-b95f-6280a245a002 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.426528] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90613c59-d3ef-4085-8b4f-a25558c5f4e6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.457106] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181278MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2610.457106] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2610.457106] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2610.521843] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance aacff339-acaa-481d-930f-a4e838525cc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522010] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f5589fbe-df43-4407-b63a-5e4f96021b61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522147] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522270] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522388] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522506] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522622] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522739] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.522854] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2610.523043] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2610.523181] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2610.538725] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2610.551017] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2610.551169] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2610.561392] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2610.578235] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2610.697608] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c8efcd-0b2e-44de-bd90-a0e28845f4b1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.705727] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b8b32a-3ca7-4516-a65c-096b6dfe5772 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.734911] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff675e6e-38f4-4985-b29e-90093e9bf5f6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.741907] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806b587c-6c75-4791-9244-47dd53425329 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.754543] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2610.763487] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2610.778621] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2610.778806] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.322s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2614.775511] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2620.384270] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2645.552034] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_power_states 
{{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2645.572807] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Getting list of instances from cluster (obj){ [ 2645.572807] env[61964]: value = "domain-c8" [ 2645.572807] env[61964]: _type = "ClusterComputeResource" [ 2645.572807] env[61964]: } {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2645.574129] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4abe472-e7cd-48d5-b4d7-68047ca50cc6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.589776] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Got total of 9 instances {{(pid=61964) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2645.589952] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid aacff339-acaa-481d-930f-a4e838525cc2 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.590164] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid f5589fbe-df43-4407-b63a-5e4f96021b61 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.590479] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid e6b8146e-6413-4c9b-81b3-07ef6c8719f5 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.590479] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 031156ba-251e-4b8b-86bd-9c967adc808f {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.590657] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.590762] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 7d1977c2-cc88-4964-989a-9258f345c4f2 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.590913] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.591074] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 04451950-2e85-46dd-a516-6b7743e03f7d {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.591230] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Triggering sync for uuid 841fd145-2c83-46a5-be0e-d0c6de409f67 {{(pid=61964) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2645.591554] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "aacff339-acaa-481d-930f-a4e838525cc2" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.591973] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "f5589fbe-df43-4407-b63a-5e4f96021b61" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.591973] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.593108] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "031156ba-251e-4b8b-86bd-9c967adc808f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.593108] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.593108] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "7d1977c2-cc88-4964-989a-9258f345c4f2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.593108] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.593276] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "04451950-2e85-46dd-a516-6b7743e03f7d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.593506] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "841fd145-2c83-46a5-be0e-d0c6de409f67" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.606920] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.607163] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2645.618156] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2645.668119] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2645.668373] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2645.670008] env[61964]: INFO nova.compute.claims [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2645.825643] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73773616-8cd6-4055-98ad-aee9ffb037c0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.833386] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52386e6-a30d-4216-812a-50cefbdf6bcb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.865733] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57c3eb7-e04d-4e23-abfb-f838f1862b59 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.873384] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eea984b-5237-4662-8ac9-a3545aa8a3a2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2645.886602] env[61964]: DEBUG nova.compute.provider_tree [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not 
changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2645.895800] env[61964]: DEBUG nova.scheduler.client.report [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2645.910837] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.242s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2645.911320] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2645.945574] env[61964]: DEBUG nova.compute.utils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2645.946784] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2645.947057] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2645.955162] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2646.003139] env[61964]: DEBUG nova.policy [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c586df6784a4660ab9602e225cceece', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a3f71b0c37437a98325ca4a2f48d46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2646.017250] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2646.044201] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2646.044443] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2646.044599] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2646.044777] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2646.044923] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2646.045138] 
env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2646.045370] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2646.045531] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2646.045697] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2646.045859] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2646.046068] env[61964]: DEBUG nova.virt.hardware [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2646.046959] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3848a9-e5ce-4a54-abfe-cb421284da60 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2646.055565] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96132be3-59f3-4156-81ef-798e7a038751 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2646.315468] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Successfully created port: adb1e56c-3902-46d9-b316-be92fa8fab2e {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2646.846763] env[61964]: DEBUG nova.compute.manager [req-eed012a3-bfe9-48eb-aeaa-fe6ef3328443 req-d7c4fa59-ca36-4ce1-8a9c-e283de9d76b8 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Received event network-vif-plugged-adb1e56c-3902-46d9-b316-be92fa8fab2e {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2646.847089] env[61964]: DEBUG oslo_concurrency.lockutils 
[req-eed012a3-bfe9-48eb-aeaa-fe6ef3328443 req-d7c4fa59-ca36-4ce1-8a9c-e283de9d76b8 service nova] Acquiring lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2646.847243] env[61964]: DEBUG oslo_concurrency.lockutils [req-eed012a3-bfe9-48eb-aeaa-fe6ef3328443 req-d7c4fa59-ca36-4ce1-8a9c-e283de9d76b8 service nova] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2646.847418] env[61964]: DEBUG oslo_concurrency.lockutils [req-eed012a3-bfe9-48eb-aeaa-fe6ef3328443 req-d7c4fa59-ca36-4ce1-8a9c-e283de9d76b8 service nova] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2646.847580] env[61964]: DEBUG nova.compute.manager [req-eed012a3-bfe9-48eb-aeaa-fe6ef3328443 req-d7c4fa59-ca36-4ce1-8a9c-e283de9d76b8 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] No waiting events found dispatching network-vif-plugged-adb1e56c-3902-46d9-b316-be92fa8fab2e {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2646.847738] env[61964]: WARNING nova.compute.manager [req-eed012a3-bfe9-48eb-aeaa-fe6ef3328443 req-d7c4fa59-ca36-4ce1-8a9c-e283de9d76b8 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Received unexpected event network-vif-plugged-adb1e56c-3902-46d9-b316-be92fa8fab2e for instance with vm_state building and task_state spawning. 
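Note on the WARNING above: the service-initiated request (req-eed012a3) delivers the external event network-vif-plugged-adb1e56c-3902-46d9-b316-be92fa8fab2e, the compute manager takes the per-instance "-events" lock in pop_instance_event, finds no registered waiter ("No waiting events found dispatching"), and logs the event as unexpected while the instance is still in vm_state building / task_state spawning. This usually just means Neutron reported the VIF plug before anything had started waiting for it, so the warning tends to be benign during spawn. The sketch below is a minimal, hypothetical illustration of that pop-or-warn pattern using only the Python standard library; the names (ExternalEventTable, prepare_for_event, deliver) are invented for illustration and are not Nova's actual API, only the observable behaviour mirrors the log.

    # Illustrative sketch only (not Nova code): match an externally
    # delivered "network-vif-plugged" event against a registered waiter.
    # If nothing is waiting, drop the event with a warning, as in the log.
    import logging
    import threading

    LOG = logging.getLogger(__name__)

    class ExternalEventTable:
        def __init__(self):
            self._lock = threading.Lock()
            # {instance_uuid: {event_name: threading.Event}}
            self._waiters = {}

        def prepare_for_event(self, instance_uuid, event_name):
            # Register interest before triggering the operation that
            # will eventually cause the event to be sent.
            waiter = threading.Event()
            with self._lock:
                self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

        def deliver(self, instance_uuid, event_name):
            # Called when the network service reports the event.
            with self._lock:
                waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if waiter is None:
                # Mirrors "Received unexpected event ..." in the log above.
                LOG.warning("Received unexpected event %s for instance %s",
                            event_name, instance_uuid)
            else:
                waiter.set()

    # Usage: register first, then wait with a timeout; if deliver() had
    # arrived before prepare_for_event(), the warning path would fire instead.
    events = ExternalEventTable()
    port_event = events.prepare_for_event(
        "3e9d2629-bdab-4d87-8c22-1dc3589138ab",
        "network-vif-plugged-adb1e56c-3902-46d9-b316-be92fa8fab2e")
    events.deliver(
        "3e9d2629-bdab-4d87-8c22-1dc3589138ab",
        "network-vif-plugged-adb1e56c-3902-46d9-b316-be92fa8fab2e")
    port_event.wait(timeout=300)

In the log the order is reversed relative to this sketch: the event arrives while the build is still allocating the network, so the pop finds nothing and the build continues, later picking the port details up from the refreshed network info cache.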
[ 2646.998552] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Successfully updated port: adb1e56c-3902-46d9-b316-be92fa8fab2e {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2647.011369] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "refresh_cache-3e9d2629-bdab-4d87-8c22-1dc3589138ab" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2647.011531] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "refresh_cache-3e9d2629-bdab-4d87-8c22-1dc3589138ab" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2647.011660] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2647.078246] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2647.251930] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Updating instance_info_cache with network_info: [{"id": "adb1e56c-3902-46d9-b316-be92fa8fab2e", "address": "fa:16:3e:64:13:53", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb1e56c-39", "ovs_interfaceid": "adb1e56c-3902-46d9-b316-be92fa8fab2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2647.264233] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "refresh_cache-3e9d2629-bdab-4d87-8c22-1dc3589138ab" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2647.264558] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Instance network_info: |[{"id": "adb1e56c-3902-46d9-b316-be92fa8fab2e", "address": "fa:16:3e:64:13:53", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb1e56c-39", "ovs_interfaceid": "adb1e56c-3902-46d9-b316-be92fa8fab2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2647.264967] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:13:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19440099-773e-4a31-b82e-84a4daa5d8fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'adb1e56c-3902-46d9-b316-be92fa8fab2e', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2647.273405] env[61964]: DEBUG oslo.service.loopingcall [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2647.273914] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2647.274163] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b030d036-9546-4971-9985-60a8b3917e01 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2647.295408] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2647.295408] env[61964]: value = "task-1688761" [ 2647.295408] env[61964]: _type = "Task" [ 2647.295408] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2647.307692] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688761, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2647.805465] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688761, 'name': CreateVM_Task} progress is 25%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2648.306051] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688761, 'name': CreateVM_Task, 'duration_secs': 0.772545} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2648.306471] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2648.306907] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2648.307084] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2648.307398] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2648.307652] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd6fd34a-7393-49df-9b10-118d49d04ede {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2648.311993] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 2648.311993] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52344331-3fbc-1b2a-3c6c-7f91c3b0b741" [ 2648.311993] env[61964]: _type = "Task" [ 2648.311993] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2648.319813] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52344331-3fbc-1b2a-3c6c-7f91c3b0b741, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2648.822028] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2648.822309] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2648.822538] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2648.874594] env[61964]: DEBUG nova.compute.manager [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Received event network-changed-adb1e56c-3902-46d9-b316-be92fa8fab2e {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2648.874740] env[61964]: DEBUG nova.compute.manager [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Refreshing instance network info cache due to event network-changed-adb1e56c-3902-46d9-b316-be92fa8fab2e. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2648.874892] env[61964]: DEBUG oslo_concurrency.lockutils [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] Acquiring lock "refresh_cache-3e9d2629-bdab-4d87-8c22-1dc3589138ab" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2648.875049] env[61964]: DEBUG oslo_concurrency.lockutils [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] Acquired lock "refresh_cache-3e9d2629-bdab-4d87-8c22-1dc3589138ab" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2648.875215] env[61964]: DEBUG nova.network.neutron [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Refreshing network info cache for port adb1e56c-3902-46d9-b316-be92fa8fab2e {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2649.414813] env[61964]: DEBUG nova.network.neutron [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Updated VIF entry in instance network info cache for port adb1e56c-3902-46d9-b316-be92fa8fab2e. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2649.415212] env[61964]: DEBUG nova.network.neutron [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Updating instance_info_cache with network_info: [{"id": "adb1e56c-3902-46d9-b316-be92fa8fab2e", "address": "fa:16:3e:64:13:53", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadb1e56c-39", "ovs_interfaceid": "adb1e56c-3902-46d9-b316-be92fa8fab2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2649.426302] env[61964]: DEBUG oslo_concurrency.lockutils [req-d832fa94-cbdd-493c-9cd9-c90297bb73f5 req-a3e205fe-3bef-426a-b117-ff307ebe1d45 service nova] Releasing lock "refresh_cache-3e9d2629-bdab-4d87-8c22-1dc3589138ab" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2651.019455] env[61964]: WARNING oslo_vmware.rw_handles [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2651.019455] env[61964]: ERROR oslo_vmware.rw_handles [ 2651.020281] env[61964]: DEBUG nova.virt.vmwareapi.images [None 
req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2651.021861] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2651.022197] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Copying Virtual Disk [datastore1] vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/736ab184-d5e8-415c-a05f-e618712fc7a6/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2651.022538] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b76effc2-8f2f-430c-baf5-269e1e05ae87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.031566] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Waiting for the task: (returnval){ [ 2651.031566] env[61964]: value = "task-1688762" [ 2651.031566] env[61964]: _type = "Task" [ 2651.031566] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2651.039951] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Task: {'id': task-1688762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2651.542077] env[61964]: DEBUG oslo_vmware.exceptions [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2651.542363] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2651.542890] env[61964]: ERROR nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2651.542890] env[61964]: Faults: ['InvalidArgument'] [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] Traceback (most recent call last): [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] yield resources [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self.driver.spawn(context, instance, image_meta, [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self._fetch_image_if_missing(context, vi) [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] image_cache(vi, tmp_image_ds_loc) [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] vm_util.copy_virtual_disk( [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] session._wait_for_task(vmdk_copy_task) [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] return self.wait_for_task(task_ref) [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] return evt.wait() [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] result = hub.switch() [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] return self.greenlet.switch() [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self.f(*self.args, **self.kw) [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] raise exceptions.translate_fault(task_info.error) [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] Faults: ['InvalidArgument'] [ 2651.542890] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] [ 2651.544097] env[61964]: INFO nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Terminating instance [ 2651.544855] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2651.545091] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2651.545333] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d73b900-026a-4d45-bec6-adfe72e60c2a {{(pid=61964) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.547743] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2651.547931] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2651.548687] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611c7465-5642-483d-9d07-393b2e921a27 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.556265] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2651.556494] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21647bfb-a6ce-4f01-8b98-828e0a921159 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.558768] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2651.558939] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2651.559895] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b33a7e0-0801-499d-81b1-1ce365998d8d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.565026] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Waiting for the task: (returnval){ [ 2651.565026] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b2d4af-9bcc-33ff-4089-45d38f69ea3b" [ 2651.565026] env[61964]: _type = "Task" [ 2651.565026] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2651.571590] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b2d4af-9bcc-33ff-4089-45d38f69ea3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2651.638652] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2651.638652] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2651.638652] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Deleting the datastore file [datastore1] aacff339-acaa-481d-930f-a4e838525cc2 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2651.638970] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-034e4f49-1a58-41b1-addf-b9a22af22a11 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.645431] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Waiting for the task: (returnval){ [ 2651.645431] env[61964]: value = "task-1688764" [ 2651.645431] env[61964]: _type = "Task" [ 2651.645431] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2651.654636] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Task: {'id': task-1688764, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2652.074513] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2652.074905] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Creating directory with path [datastore1] vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2652.075060] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1678970-6c13-49cf-b3bf-73b9b4c65aae {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.086633] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Created directory with path [datastore1] vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2652.086856] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Fetch image to [datastore1] vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2652.086987] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2652.087784] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2eb2c6-29c8-420c-beec-7dc462627c8e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.094554] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eea32e0-a670-47e3-8e6e-7fd4673d57fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.103719] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb44e3d-99dd-432b-b38a-c64fbda18f39 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.134938] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f97ee2-4b25-49fe-a134-030e480511f2 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.140524] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8f4188e1-defa-4b9e-9616-a73b0c1e10de {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.154392] env[61964]: DEBUG oslo_vmware.api [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Task: {'id': task-1688764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083331} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2652.155061] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2652.155061] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2652.155061] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2652.155319] env[61964]: INFO nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Took 0.61 seconds to destroy the instance on the hypervisor. 
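The entries above show the recurring pattern in this log: the driver submits a vCenter task (here DeleteDatastoreFile_Task), polls it until it completes, and translates a task error into an exception, which is exactly how the earlier "A specified parameter was not correct: fileType" fault surfaced out of wait_for_task. The following is a minimal, self-contained sketch of that poll-until-done loop; `get_task_info` is a hypothetical callable standing in for reading the vCenter TaskInfo, not the real oslo.vmware session API.

```python
import time


class TaskFault(Exception):
    """Raised when the remote task ends in an error state."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a long-running task until it succeeds, fails, or times out.

    `get_task_info` is assumed to return an object with `state`
    ('running' | 'success' | 'error'), `progress` (int) and `error`
    (str or None); the real driver reads these fields from the
    vCenter TaskInfo property instead.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Mirrors the traceback above: the task's error is turned
            # into an exception (e.g. "A specified parameter was not
            # correct: fileType" / Faults: ['InvalidArgument']).
            raise TaskFault(info.error)
        print(f"task progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```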
[ 2652.157380] env[61964]: DEBUG nova.compute.claims [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2652.157593] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2652.157862] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2652.162313] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2652.220980] env[61964]: DEBUG oslo_vmware.rw_handles [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2652.284168] env[61964]: DEBUG oslo_vmware.rw_handles [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2652.284168] env[61964]: DEBUG oslo_vmware.rw_handles [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2652.408757] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4146e835-273c-44bd-a50f-1977230d5a32 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.416126] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7029d592-78fb-47ee-8d39-3eafddf7fcdb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.447021] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf836723-e28e-4960-92c0-23c10abf40af {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.455015] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1ed777-b494-49ee-a884-4f975da50769 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2652.468461] env[61964]: DEBUG nova.compute.provider_tree [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2652.477175] env[61964]: DEBUG nova.scheduler.client.report [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2652.492860] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.335s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2652.493341] env[61964]: ERROR nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2652.493341] env[61964]: Faults: ['InvalidArgument'] [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] Traceback (most recent call last): [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2652.493341] 
env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self.driver.spawn(context, instance, image_meta, [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self._fetch_image_if_missing(context, vi) [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] image_cache(vi, tmp_image_ds_loc) [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] vm_util.copy_virtual_disk( [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] session._wait_for_task(vmdk_copy_task) [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] return self.wait_for_task(task_ref) [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] return evt.wait() [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] result = hub.switch() [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] return self.greenlet.switch() [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] self.f(*self.args, **self.kw) [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] raise exceptions.translate_fault(task_info.error) [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] Faults: ['InvalidArgument'] [ 2652.493341] env[61964]: ERROR nova.compute.manager [instance: aacff339-acaa-481d-930f-a4e838525cc2] [ 2652.494397] env[61964]: DEBUG nova.compute.utils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2652.495766] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Build of instance aacff339-acaa-481d-930f-a4e838525cc2 was re-scheduled: A specified parameter was not correct: fileType [ 2652.495766] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2652.496181] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2652.496387] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2652.496567] env[61964]: DEBUG nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2652.496748] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2652.955915] env[61964]: DEBUG nova.network.neutron [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2652.980881] env[61964]: INFO nova.compute.manager [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Took 0.48 seconds to deallocate network for instance. [ 2653.098849] env[61964]: INFO nova.scheduler.client.report [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Deleted allocations for instance aacff339-acaa-481d-930f-a4e838525cc2 [ 2653.123582] env[61964]: DEBUG oslo_concurrency.lockutils [None req-fdfe17c9-b562-4aa4-9c59-9c84e411f9d9 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "aacff339-acaa-481d-930f-a4e838525cc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 559.696s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2653.123582] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "aacff339-acaa-481d-930f-a4e838525cc2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 363.345s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2653.123832] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Acquiring lock "aacff339-acaa-481d-930f-a4e838525cc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2653.123919] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "aacff339-acaa-481d-930f-a4e838525cc2-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2653.124097] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "aacff339-acaa-481d-930f-a4e838525cc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2653.126277] env[61964]: INFO nova.compute.manager [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Terminating instance [ 2653.128203] env[61964]: DEBUG nova.compute.manager [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2653.128444] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2653.128995] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1377b11f-6c5c-4412-9ee1-93a3d7bc845c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2653.138971] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39a3ec3-11c4-429d-9e1d-05f06eac7ca8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2653.168608] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aacff339-acaa-481d-930f-a4e838525cc2 could not be found. [ 2653.168828] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2653.169010] env[61964]: INFO nova.compute.manager [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2653.169274] env[61964]: DEBUG oslo.service.loopingcall [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2653.169500] env[61964]: DEBUG nova.compute.manager [-] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2653.169592] env[61964]: DEBUG nova.network.neutron [-] [instance: aacff339-acaa-481d-930f-a4e838525cc2] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2653.212450] env[61964]: DEBUG nova.network.neutron [-] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2653.222843] env[61964]: INFO nova.compute.manager [-] [instance: aacff339-acaa-481d-930f-a4e838525cc2] Took 0.05 seconds to deallocate network for instance. [ 2653.312462] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c88878d0-5891-4e34-bfa1-a3595767f7d8 tempest-ListServerFiltersTestJSON-198013788 tempest-ListServerFiltersTestJSON-198013788-project-member] Lock "aacff339-acaa-481d-930f-a4e838525cc2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.189s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2653.313354] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "aacff339-acaa-481d-930f-a4e838525cc2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.722s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2653.313545] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: aacff339-acaa-481d-930f-a4e838525cc2] During sync_power_state the instance has a pending task (deleting). Skip. 
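The next entry records the periodic power-state sync skipping this instance because it has a pending task (deleting). A simplified sketch of that skip logic follows, using hypothetical `Instance` records and a `driver_power_state` callable rather than Nova's actual objects.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Instance:
    uuid: str
    task_state: Optional[str]    # e.g. 'deleting', 'spawning', or None
    power_state: str             # state currently recorded in the database


def sync_power_states(instances, driver_power_state):
    """Reconcile recorded power state with what the hypervisor reports.

    Instances with a pending task (such as 'deleting') are skipped, as in
    the log line above, because the in-flight operation will update their
    state itself. `driver_power_state` is a hypothetical uuid -> state
    callable standing in for the virt driver query.
    """
    for inst in instances:
        if inst.task_state is not None:
            print(f"[instance: {inst.uuid}] pending task "
                  f"({inst.task_state}), skip power state sync")
            continue
        actual = driver_power_state(inst.uuid)
        if actual != inst.power_state:
            inst.power_state = actual    # persist the corrected state
```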
[ 2653.313748] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "aacff339-acaa-481d-930f-a4e838525cc2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2656.268523] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "04451950-2e85-46dd-a516-6b7743e03f7d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2657.425416] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2657.965095] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2657.965786] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2657.969998] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2657.969998] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2657.980073] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2658.034145] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2658.034406] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2658.035868] env[61964]: INFO nova.compute.claims [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2658.204642] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0ae7e2-2481-4bd9-a948-4d729c8ea201 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.213052] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6140cebc-7e67-4515-85f5-c33a86738b00 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.243440] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94b59d7-4786-4eda-9ed9-c21eec14e270 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.250851] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2df7ca6-b1c9-49d8-bfeb-892bfbe220db {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.263861] env[61964]: DEBUG nova.compute.provider_tree [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2658.271799] env[61964]: DEBUG nova.scheduler.client.report [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2658.284646] env[61964]: DEBUG oslo_concurrency.lockutils 
[None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.250s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2658.285105] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2658.317195] env[61964]: DEBUG nova.compute.utils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2658.318440] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2658.318608] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2658.327407] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2658.373151] env[61964]: DEBUG nova.policy [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efff025030884fb6867b5f1e40bc1d0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd88f9d7631944ddfaf94cae97af348ca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2658.383553] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2658.383706] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2658.383824] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2658.391133] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2658.402783] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.402942] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.403110] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.403308] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.403446] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.403568] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.403688] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.403807] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.403924] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.404052] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2658.404174] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2658.414493] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2658.414717] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2658.414872] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2658.415064] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2658.415341] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2658.415541] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2658.415748] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2658.415911] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2658.416152] env[61964]: 
DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2658.416320] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2658.416499] env[61964]: DEBUG nova.virt.hardware [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2658.417605] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0133c5e0-8814-467c-95f7-c4c944f8772b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.426261] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261baf9c-5c85-4e24-91fd-f2e593ca00f8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.695151] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Successfully created port: a09040be-fcc7-46b5-a8d3-69a140490e9f {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2659.282733] env[61964]: DEBUG nova.compute.manager [req-c6389d14-7971-4a36-bfe7-197185d49a3c req-63b7f8cd-3c15-4a2b-aeb8-c9cf15946a1f service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Received event network-vif-plugged-a09040be-fcc7-46b5-a8d3-69a140490e9f {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2659.282962] env[61964]: DEBUG oslo_concurrency.lockutils [req-c6389d14-7971-4a36-bfe7-197185d49a3c req-63b7f8cd-3c15-4a2b-aeb8-c9cf15946a1f service nova] Acquiring lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2659.283186] env[61964]: DEBUG oslo_concurrency.lockutils [req-c6389d14-7971-4a36-bfe7-197185d49a3c req-63b7f8cd-3c15-4a2b-aeb8-c9cf15946a1f service nova] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2659.283353] env[61964]: DEBUG oslo_concurrency.lockutils [req-c6389d14-7971-4a36-bfe7-197185d49a3c req-63b7f8cd-3c15-4a2b-aeb8-c9cf15946a1f service nova] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2659.283514] env[61964]: 
DEBUG nova.compute.manager [req-c6389d14-7971-4a36-bfe7-197185d49a3c req-63b7f8cd-3c15-4a2b-aeb8-c9cf15946a1f service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] No waiting events found dispatching network-vif-plugged-a09040be-fcc7-46b5-a8d3-69a140490e9f {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2659.283674] env[61964]: WARNING nova.compute.manager [req-c6389d14-7971-4a36-bfe7-197185d49a3c req-63b7f8cd-3c15-4a2b-aeb8-c9cf15946a1f service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Received unexpected event network-vif-plugged-a09040be-fcc7-46b5-a8d3-69a140490e9f for instance with vm_state building and task_state spawning. [ 2659.359333] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Successfully updated port: a09040be-fcc7-46b5-a8d3-69a140490e9f {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2659.369649] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "refresh_cache-6fe43dac-8e0d-4045-a7d7-5ed685c67a02" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2659.369788] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquired lock "refresh_cache-6fe43dac-8e0d-4045-a7d7-5ed685c67a02" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2659.369930] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2659.411839] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2659.587841] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Updating instance_info_cache with network_info: [{"id": "a09040be-fcc7-46b5-a8d3-69a140490e9f", "address": "fa:16:3e:7e:26:0b", "network": {"id": "6e16af31-b024-4be3-bb8d-b71273408661", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-635029015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d88f9d7631944ddfaf94cae97af348ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa09040be-fc", "ovs_interfaceid": "a09040be-fcc7-46b5-a8d3-69a140490e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2659.598270] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Releasing lock "refresh_cache-6fe43dac-8e0d-4045-a7d7-5ed685c67a02" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2659.598552] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Instance network_info: |[{"id": "a09040be-fcc7-46b5-a8d3-69a140490e9f", "address": "fa:16:3e:7e:26:0b", "network": {"id": "6e16af31-b024-4be3-bb8d-b71273408661", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-635029015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d88f9d7631944ddfaf94cae97af348ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa09040be-fc", "ovs_interfaceid": "a09040be-fcc7-46b5-a8d3-69a140490e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2659.598932] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:26:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cc30a16-f070-421c-964e-50c9aa32f17a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a09040be-fcc7-46b5-a8d3-69a140490e9f', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2659.606458] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Creating folder: Project (d88f9d7631944ddfaf94cae97af348ca). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2659.606967] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d5c844b-025a-47f9-bb1e-a194677cfefd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.617743] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Created folder: Project (d88f9d7631944ddfaf94cae97af348ca) in parent group-v351942. [ 2659.617923] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Creating folder: Instances. Parent ref: group-v352048. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2659.618157] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bae97c07-137d-4f7e-b32d-b08400123f0c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.627353] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Created folder: Instances in parent group-v352048. [ 2659.627573] env[61964]: DEBUG oslo.service.loopingcall [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2659.627746] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2659.627932] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de9aefb1-6716-45ca-a2b0-40e07f0345aa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.646746] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2659.646746] env[61964]: value = "task-1688767" [ 2659.646746] env[61964]: _type = "Task" [ 2659.646746] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2659.653855] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688767, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2660.156957] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688767, 'name': CreateVM_Task, 'duration_secs': 0.349402} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2660.157161] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2660.157828] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2660.157988] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2660.158985] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2660.158985] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55f53949-dc49-4c91-98ab-eb049defe81a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2660.162995] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Waiting for the task: (returnval){ [ 2660.162995] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d593e3-4d8c-50ee-2135-67e245470adc" [ 2660.162995] env[61964]: _type = "Task" [ 2660.162995] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2660.170335] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d593e3-4d8c-50ee-2135-67e245470adc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2660.673460] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2660.673860] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2660.673999] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2661.310876] env[61964]: DEBUG nova.compute.manager [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Received event network-changed-a09040be-fcc7-46b5-a8d3-69a140490e9f {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2661.311097] env[61964]: DEBUG nova.compute.manager [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Refreshing instance network info cache due to event network-changed-a09040be-fcc7-46b5-a8d3-69a140490e9f. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2661.311313] env[61964]: DEBUG oslo_concurrency.lockutils [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] Acquiring lock "refresh_cache-6fe43dac-8e0d-4045-a7d7-5ed685c67a02" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2661.311456] env[61964]: DEBUG oslo_concurrency.lockutils [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] Acquired lock "refresh_cache-6fe43dac-8e0d-4045-a7d7-5ed685c67a02" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2661.311614] env[61964]: DEBUG nova.network.neutron [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Refreshing network info cache for port a09040be-fcc7-46b5-a8d3-69a140490e9f {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2661.383725] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2661.383943] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2661.384161] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2661.544587] env[61964]: DEBUG nova.network.neutron [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Updated VIF entry in instance network info cache for port a09040be-fcc7-46b5-a8d3-69a140490e9f. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2661.544939] env[61964]: DEBUG nova.network.neutron [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Updating instance_info_cache with network_info: [{"id": "a09040be-fcc7-46b5-a8d3-69a140490e9f", "address": "fa:16:3e:7e:26:0b", "network": {"id": "6e16af31-b024-4be3-bb8d-b71273408661", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-635029015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d88f9d7631944ddfaf94cae97af348ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cc30a16-f070-421c-964e-50c9aa32f17a", "external-id": "nsx-vlan-transportzone-424", "segmentation_id": 424, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa09040be-fc", "ovs_interfaceid": "a09040be-fcc7-46b5-a8d3-69a140490e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2661.554575] env[61964]: DEBUG oslo_concurrency.lockutils [req-3ad37c6a-634f-412c-8fcb-2b5e1788be9b req-0d1a915d-bb00-4280-a64d-17c313758127 service nova] Releasing lock "refresh_cache-6fe43dac-8e0d-4045-a7d7-5ed685c67a02" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2665.384602] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2666.383089] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2669.380260] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2670.384106] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2672.384646] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2672.396763] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2672.396936] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2672.397115] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2672.397311] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2672.398415] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0433d190-4f38-48ca-b2ef-282697d22b87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.407024] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe0e9cb-0e9a-4d5f-a7e7-0d6b0ec3e8f5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.421516] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe8a4c8-6c17-443c-94cb-87d11f54a132 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.427788] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4287e573-e72b-4f21-99f9-c787fd768e9e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.456396] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181328MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2672.456551] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2672.456745] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2672.528795] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 
f5589fbe-df43-4407-b63a-5e4f96021b61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.528957] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529095] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529231] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529351] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529470] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529667] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529715] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529828] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.529940] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2672.540927] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2672.541170] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2672.541314] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2672.661378] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf494c9b-8d5d-4388-8b88-a106984036de {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.668393] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d30ea92-488f-4756-b1b4-eb6fd397b3ce {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.698323] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c170f00f-f6a3-4c96-bccd-23070e5b58c3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.705570] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a08619-b69f-4705-8de8-90e6c4a47d34 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2672.719129] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2672.727512] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2672.741157] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2672.741337] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.285s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2686.890280] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "841fd145-2c83-46a5-be0e-d0c6de409f67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2701.035794] env[61964]: WARNING oslo_vmware.rw_handles [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2701.035794] env[61964]: ERROR oslo_vmware.rw_handles [ 2701.036566] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2701.038439] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: 
f5589fbe-df43-4407-b63a-5e4f96021b61] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2701.038726] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Copying Virtual Disk [datastore1] vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/7943ebce-63ae-48ce-a5b1-e86184efb6fa/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2701.039018] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24bc75e8-24a2-4022-96e6-c6dd9d93a342 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.047346] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Waiting for the task: (returnval){ [ 2701.047346] env[61964]: value = "task-1688768" [ 2701.047346] env[61964]: _type = "Task" [ 2701.047346] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2701.055645] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Task: {'id': task-1688768, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2701.557756] env[61964]: DEBUG oslo_vmware.exceptions [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2701.557988] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2701.558534] env[61964]: ERROR nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2701.558534] env[61964]: Faults: ['InvalidArgument'] [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Traceback (most recent call last): [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] yield resources [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self.driver.spawn(context, instance, image_meta, [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self._fetch_image_if_missing(context, vi) [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] image_cache(vi, tmp_image_ds_loc) [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] vm_util.copy_virtual_disk( [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] session._wait_for_task(vmdk_copy_task) [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] return self.wait_for_task(task_ref) [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] return evt.wait() [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] result = hub.switch() [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] return self.greenlet.switch() [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self.f(*self.args, **self.kw) [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] raise exceptions.translate_fault(task_info.error) [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Faults: ['InvalidArgument'] [ 2701.558534] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] [ 2701.559692] env[61964]: INFO nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Terminating instance [ 2701.560371] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2701.560574] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2701.560830] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fc52a29-7dbb-41b2-8c00-f6d93204ab21 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.564372] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2701.564591] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2701.565342] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abdd1e6-e509-4de7-93e8-edf6011eacdf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.569434] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2701.569605] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2701.570286] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a999ae5-c7a3-4951-b8c3-8a42fd81028e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.574129] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2701.574652] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad3dd291-6ce7-4344-9834-88188d17aa91 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.577148] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Waiting for the task: (returnval){ [ 2701.577148] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52764f40-d413-1304-c435-9a1a05981514" [ 2701.577148] env[61964]: _type = "Task" [ 2701.577148] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2701.586839] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52764f40-d413-1304-c435-9a1a05981514, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2701.651686] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2701.651916] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2701.652103] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Deleting the datastore file [datastore1] f5589fbe-df43-4407-b63a-5e4f96021b61 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2701.652376] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6475093e-e008-458e-9cf4-6543f3363fa8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2701.658821] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Waiting for the task: (returnval){ [ 2701.658821] env[61964]: value = "task-1688770" [ 2701.658821] env[61964]: _type = "Task" [ 2701.658821] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2701.667982] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Task: {'id': task-1688770, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2702.088171] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2702.088558] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Creating directory with path [datastore1] vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2702.088659] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce59bfb4-7e6b-4228-bc62-fa1490751981 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.102681] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Created directory with path [datastore1] vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2702.102920] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Fetch image to [datastore1] vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2702.103163] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2702.103879] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e6b718-b8ab-435a-9b84-814fb11b8344 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.110827] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30639d9-217e-4fcc-a346-7ec966f0489b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.120453] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d762770-c736-490d-bc47-b1bb71a5e68a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.152056] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4204f84-1a09-4ef7-b1e6-7d5079fc9e47 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.158832] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2eea622a-835f-4288-ad6e-0ed72c0e826e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.169496] env[61964]: DEBUG oslo_vmware.api [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Task: {'id': task-1688770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066629} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2702.169745] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2702.169928] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2702.170115] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2702.170287] env[61964]: INFO nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2702.172696] env[61964]: DEBUG nova.compute.claims [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2702.172862] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2702.173089] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2702.187833] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2702.248527] env[61964]: DEBUG oslo_vmware.rw_handles [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2702.310185] env[61964]: DEBUG oslo_vmware.rw_handles [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2702.310385] env[61964]: DEBUG oslo_vmware.rw_handles [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2702.428571] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c8ff64-e22e-4fc9-a657-ff30e7de7cd1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.435974] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17489ff-6544-492d-a9e5-0110a9e5d932 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.468552] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b5f250-7756-49dd-a4ed-7fb2cc8f2b4a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.480022] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818a003d-bdab-4ae7-908d-7e1fe4d7c2fa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2702.491425] env[61964]: DEBUG nova.compute.provider_tree [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2702.502134] env[61964]: DEBUG nova.scheduler.client.report [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2702.517705] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.344s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2702.518845] env[61964]: ERROR nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2702.518845] env[61964]: Faults: ['InvalidArgument'] [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Traceback (most recent call last): [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2702.518845] env[61964]: ERROR nova.compute.manager 
[instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self.driver.spawn(context, instance, image_meta, [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self._fetch_image_if_missing(context, vi) [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] image_cache(vi, tmp_image_ds_loc) [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] vm_util.copy_virtual_disk( [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] session._wait_for_task(vmdk_copy_task) [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] return self.wait_for_task(task_ref) [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] return evt.wait() [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] result = hub.switch() [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] return self.greenlet.switch() [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] self.f(*self.args, **self.kw) [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] raise exceptions.translate_fault(task_info.error) [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Faults: ['InvalidArgument'] [ 2702.518845] env[61964]: ERROR nova.compute.manager [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] [ 2702.520939] env[61964]: DEBUG nova.compute.utils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2702.523117] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Build of instance f5589fbe-df43-4407-b63a-5e4f96021b61 was re-scheduled: A specified parameter was not correct: fileType [ 2702.523117] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2702.523904] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2702.524240] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2702.527018] env[61964]: DEBUG nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2702.527018] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2702.886148] env[61964]: DEBUG nova.network.neutron [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2702.903577] env[61964]: INFO nova.compute.manager [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Took 0.38 seconds to deallocate network for instance. [ 2703.003606] env[61964]: INFO nova.scheduler.client.report [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Deleted allocations for instance f5589fbe-df43-4407-b63a-5e4f96021b61 [ 2703.030593] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3e32a2c-e00f-4d16-b2e6-33173076d586 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 515.062s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2703.031777] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 318.856s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2703.032019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Acquiring lock "f5589fbe-df43-4407-b63a-5e4f96021b61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2703.032255] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2703.032429] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2703.034559] env[61964]: INFO nova.compute.manager [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Terminating instance [ 2703.036269] env[61964]: DEBUG nova.compute.manager [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2703.036459] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2703.036947] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca4da80f-65ea-43a5-b0c1-7d33977da5e6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.045824] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6840d4da-a8aa-4ee8-9e55-2a585eb856b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.056558] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2703.078854] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5589fbe-df43-4407-b63a-5e4f96021b61 could not be found. [ 2703.079142] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2703.079254] env[61964]: INFO nova.compute.manager [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Took 0.04 seconds to destroy the instance on the hypervisor. 
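Note on the teardown above: after the fileType/InvalidArgument fault unwound the build, vmops logs a WARNING that the VM no longer exists on the backend and still reports "Instance destroyed", so terminate_instance can finish and release the per-instance lock. The following is a minimal stdlib sketch of that tolerant-teardown pattern; BackendNotFound, destroy_on_backend and terminate are hypothetical names that only mirror the behaviour shown in these log lines, not Nova's actual code.

    import logging

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("terminate-sketch")

    class BackendNotFound(Exception):
        """Raised when the hypervisor no longer has the VM (hypothetical stand-in)."""

    def destroy_on_backend(instance_uuid, backend_vms):
        # Hypothetical backend call: delete the VM if it is still present.
        if instance_uuid not in backend_vms:
            raise BackendNotFound(instance_uuid)
        del backend_vms[instance_uuid]

    def terminate(instance_uuid, backend_vms):
        """Tear down an instance, tolerating a VM that already vanished."""
        try:
            destroy_on_backend(instance_uuid, backend_vms)
        except BackendNotFound:
            # Mirrors the WARNING in the log: a VM missing on the backend is not
            # fatal; network and allocation cleanup must still proceed.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.debug("Instance destroyed: %s", instance_uuid)
        # ...deallocate network, drop placement allocations, release the lock...

    if __name__ == "__main__":
        terminate("f5589fbe-df43-4407-b63a-5e4f96021b61", backend_vms={})

Running the sketch prints the same WARNING-then-destroyed sequence as the entries above, which is why the subsequent lock release and network deallocation can proceed even though no VM was ever created for this instance.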
[ 2703.079499] env[61964]: DEBUG oslo.service.loopingcall [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2703.079744] env[61964]: DEBUG nova.compute.manager [-] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2703.079840] env[61964]: DEBUG nova.network.neutron [-] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2703.105693] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2703.105992] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2703.107525] env[61964]: INFO nova.compute.claims [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2703.110289] env[61964]: DEBUG nova.network.neutron [-] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2703.118831] env[61964]: INFO nova.compute.manager [-] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] Took 0.04 seconds to deallocate network for instance. [ 2703.217581] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1ecf0eb3-4388-4c13-8ea2-d19f92ba6593 tempest-ServerGroupTestJSON-553633348 tempest-ServerGroupTestJSON-553633348-project-member] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2703.218541] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 57.627s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2703.218737] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f5589fbe-df43-4407-b63a-5e4f96021b61] During sync_power_state the instance has a pending task (deleting). Skip. 
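Several surrounding entries (the CreateVM_Task and CopyVirtualDisk_Task waits, and the _poll_task frame in the traceback) follow one shape: poll the vCenter task at a fixed interval, log its progress, and translate a task error into an exception once it leaves the running states. The sketch below approximates that loop with the standard library only; poll_task, fetch_task_info and TaskFailed are hypothetical names and the state strings are assumptions, not oslo.vmware's API.

    import time
    import logging

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("poll-task-sketch")

    class TaskFailed(Exception):
        """Raised when the remote task reports an error (hypothetical stand-in)."""

    def poll_task(fetch_task_info, task_id, interval=0.5, timeout=60.0):
        """Poll a remote task until it succeeds, fails, or times out.

        fetch_task_info(task_id) is assumed to return a dict with keys
        'state' ('queued' | 'running' | 'success' | 'error'), 'progress' and 'error'.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_task_info(task_id)
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # Comparable to translate_fault() raising VimFaultException above.
                raise TaskFailed(info.get("error", "unknown fault"))
            LOG.debug("Task %s progress is %s%%.", task_id, info.get("progress", 0))
            if time.monotonic() >= deadline:
                raise TimeoutError(f"task {task_id} did not finish in {timeout}s")
            time.sleep(interval)

    if __name__ == "__main__":
        states = iter([{"state": "running", "progress": 0},
                       {"state": "running", "progress": 50},
                       {"state": "success", "progress": 100}])
        print(poll_task(lambda _task_id: next(states), "task-1688771", interval=0.01))

In the real service the poll runs inside a green-thread looping call (the oslo_vmware/common/loopingcall.py frame in the traceback), which is why the error surfaces through evt.wait() / hub.switch() rather than a plain loop as sketched here.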
[ 2703.218936] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "f5589fbe-df43-4407-b63a-5e4f96021b61" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2703.283746] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa15df0-bedf-425a-b5de-d10f3233b935 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.291708] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0b1ccd-6e6b-4c9a-84fe-ac43820e1139 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.320485] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dccb687-b5bf-46e3-8d5b-ff627ae698c4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.327199] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbfd98dc-9291-4cef-9651-7f243bcf8324 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.339637] env[61964]: DEBUG nova.compute.provider_tree [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2703.348526] env[61964]: DEBUG nova.scheduler.client.report [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2703.362584] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.257s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2703.363048] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Start building networks asynchronously for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2703.397983] env[61964]: DEBUG nova.compute.utils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2703.398103] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2703.398266] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2703.407432] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2703.462920] env[61964]: DEBUG nova.policy [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2a54aa10d9490c9824f342bda68aaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ce335cfa25f4c0aa494bd2c87dda282', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2703.472441] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2703.498058] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2703.498321] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2703.498481] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2703.498671] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2703.498837] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2703.498993] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2703.499233] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2703.499408] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2703.499588] env[61964]: DEBUG nova.virt.hardware [None 
req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2703.499764] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2703.499934] env[61964]: DEBUG nova.virt.hardware [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2703.500839] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142f7e1c-bd26-477a-9cc1-bd97b2b9b1f4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.510209] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa4ee5f-08ec-4c4a-ad3b-8d657949effa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2703.804272] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Successfully created port: 53327193-de5f-408e-b3b6-02818df7b9d8 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2704.369292] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Successfully updated port: 53327193-de5f-408e-b3b6-02818df7b9d8 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2704.380347] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "refresh_cache-e717f146-dd0a-4285-810f-8f9cc7ffaa6e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2704.380507] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired lock "refresh_cache-e717f146-dd0a-4285-810f-8f9cc7ffaa6e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2704.380666] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2704.420492] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 
tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2704.574935] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Updating instance_info_cache with network_info: [{"id": "53327193-de5f-408e-b3b6-02818df7b9d8", "address": "fa:16:3e:c6:32:64", "network": {"id": "fc1c1a44-a704-4482-a74c-2f3f03a5ca6c", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1683801677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ce335cfa25f4c0aa494bd2c87dda282", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53327193-de", "ovs_interfaceid": "53327193-de5f-408e-b3b6-02818df7b9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2704.587636] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Releasing lock "refresh_cache-e717f146-dd0a-4285-810f-8f9cc7ffaa6e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2704.587927] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Instance network_info: |[{"id": "53327193-de5f-408e-b3b6-02818df7b9d8", "address": "fa:16:3e:c6:32:64", "network": {"id": "fc1c1a44-a704-4482-a74c-2f3f03a5ca6c", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1683801677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ce335cfa25f4c0aa494bd2c87dda282", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53327193-de", "ovs_interfaceid": "53327193-de5f-408e-b3b6-02818df7b9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2704.588347] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:32:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53327193-de5f-408e-b3b6-02818df7b9d8', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2704.595697] env[61964]: DEBUG oslo.service.loopingcall [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2704.596310] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2704.596411] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fecf3eb2-650d-4991-9064-13f8cafd5f2e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2704.616940] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2704.616940] env[61964]: value = "task-1688771" [ 2704.616940] env[61964]: _type = "Task" [ 2704.616940] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2704.624766] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688771, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2704.927126] env[61964]: DEBUG nova.compute.manager [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Received event network-vif-plugged-53327193-de5f-408e-b3b6-02818df7b9d8 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2704.927437] env[61964]: DEBUG oslo_concurrency.lockutils [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] Acquiring lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2704.927548] env[61964]: DEBUG oslo_concurrency.lockutils [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2704.927720] env[61964]: DEBUG oslo_concurrency.lockutils [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2704.927883] env[61964]: DEBUG nova.compute.manager [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] No waiting events found dispatching network-vif-plugged-53327193-de5f-408e-b3b6-02818df7b9d8 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2704.928060] env[61964]: WARNING nova.compute.manager [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Received unexpected event network-vif-plugged-53327193-de5f-408e-b3b6-02818df7b9d8 for instance with vm_state building and task_state spawning. [ 2704.928220] env[61964]: DEBUG nova.compute.manager [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Received event network-changed-53327193-de5f-408e-b3b6-02818df7b9d8 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2704.928374] env[61964]: DEBUG nova.compute.manager [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Refreshing instance network info cache due to event network-changed-53327193-de5f-408e-b3b6-02818df7b9d8. 
{{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2704.928554] env[61964]: DEBUG oslo_concurrency.lockutils [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] Acquiring lock "refresh_cache-e717f146-dd0a-4285-810f-8f9cc7ffaa6e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2704.928689] env[61964]: DEBUG oslo_concurrency.lockutils [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] Acquired lock "refresh_cache-e717f146-dd0a-4285-810f-8f9cc7ffaa6e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2704.928840] env[61964]: DEBUG nova.network.neutron [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Refreshing network info cache for port 53327193-de5f-408e-b3b6-02818df7b9d8 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2705.126355] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688771, 'name': CreateVM_Task, 'duration_secs': 0.291983} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2705.126547] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2705.135805] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2705.135968] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2705.136310] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2705.136598] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4713c85-1a9f-46d6-ae78-c62f5ebf4a8a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2705.140882] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 2705.140882] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524b1a6b-cd5a-cc3b-56de-e749e218b6a7" [ 2705.140882] env[61964]: _type = "Task" [ 2705.140882] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2705.148236] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524b1a6b-cd5a-cc3b-56de-e749e218b6a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2705.169674] env[61964]: DEBUG nova.network.neutron [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Updated VIF entry in instance network info cache for port 53327193-de5f-408e-b3b6-02818df7b9d8. {{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2705.169990] env[61964]: DEBUG nova.network.neutron [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Updating instance_info_cache with network_info: [{"id": "53327193-de5f-408e-b3b6-02818df7b9d8", "address": "fa:16:3e:c6:32:64", "network": {"id": "fc1c1a44-a704-4482-a74c-2f3f03a5ca6c", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1683801677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ce335cfa25f4c0aa494bd2c87dda282", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53327193-de", "ovs_interfaceid": "53327193-de5f-408e-b3b6-02818df7b9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2705.179319] env[61964]: DEBUG oslo_concurrency.lockutils [req-f40754cb-56df-42c5-96a4-952f86ffd8b8 req-50cb7186-010e-4a27-b71f-713300254396 service nova] Releasing lock "refresh_cache-e717f146-dd0a-4285-810f-8f9cc7ffaa6e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2705.652273] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2705.652273] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2705.652273] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2717.741644] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2720.384107] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2720.384389] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2720.384428] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2720.408418] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.408695] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.408941] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.409207] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.409458] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.409695] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.409936] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.410197] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.410447] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.410691] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2720.410921] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2721.384027] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2721.384181] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2723.384404] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2725.384651] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2727.384210] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2729.379291] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2731.383584] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2733.384155] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2733.396053] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2733.396286] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2733.396448] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2733.396603] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2733.397789] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afecb78d-6983-4825-8fed-ccf5972fd8f7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.406528] env[61964]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80704f30-bacb-4329-9e2a-44706d6b1e3d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.420173] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88759a9-882e-4fd4-9b36-24a4fd3c9242 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.426207] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a014e1a0-3f2e-4f20-a6ee-ab70ee4993c1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.455595] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181371MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2733.455707] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2733.455874] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2733.524549] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.524734] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.524916] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525062] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525186] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525304] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525438] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525575] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525691] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525803] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2733.525992] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2733.526208] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2733.652745] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff994baf-1849-4d45-8eb6-bf65322c2740 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.660334] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0fed52-f4c5-47cd-bd2c-9b6d725b2a78 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.689295] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554243df-2b19-48fd-994e-a46e5429e8b8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.696076] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b64546-759e-4b25-8937-3ddcb313ce69 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2733.708597] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2733.717018] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2733.729867] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2733.730049] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.274s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2738.726094] env[61964]: DEBUG oslo_service.periodic_task [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2750.989330] env[61964]: WARNING oslo_vmware.rw_handles [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2750.989330] env[61964]: ERROR oslo_vmware.rw_handles [ 2750.990164] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2750.991971] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2750.992229] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Copying Virtual Disk [datastore1] vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/7f4ecd09-5e69-4fdc-aa52-e395ecf4abbe/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2750.992537] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb774cd1-c067-403d-b27c-98f6e0d409e9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.001707] env[61964]: DEBUG oslo_vmware.api [None 
req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Waiting for the task: (returnval){ [ 2751.001707] env[61964]: value = "task-1688772" [ 2751.001707] env[61964]: _type = "Task" [ 2751.001707] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2751.009859] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Task: {'id': task-1688772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2751.511663] env[61964]: DEBUG oslo_vmware.exceptions [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2751.511953] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2751.512501] env[61964]: ERROR nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2751.512501] env[61964]: Faults: ['InvalidArgument'] [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Traceback (most recent call last): [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] yield resources [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self.driver.spawn(context, instance, image_meta, [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self._fetch_image_if_missing(context, vi) [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] image_cache(vi, tmp_image_ds_loc) [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] vm_util.copy_virtual_disk( [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] session._wait_for_task(vmdk_copy_task) [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] return self.wait_for_task(task_ref) [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] return evt.wait() [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] result = hub.switch() [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] return self.greenlet.switch() [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self.f(*self.args, **self.kw) [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] raise exceptions.translate_fault(task_info.error) [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Faults: ['InvalidArgument'] [ 2751.512501] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] [ 2751.513604] env[61964]: INFO nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: 
e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Terminating instance [ 2751.514341] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2751.514546] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2751.514778] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-104040d4-3587-47ad-8835-a67a011f8e42 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.516754] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2751.516909] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquired lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2751.517107] env[61964]: DEBUG nova.network.neutron [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2751.524786] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2751.524995] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2751.525724] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f0e4611-1a23-48c8-9b74-102938afab68 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.533260] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 2751.533260] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525598df-9a20-0f0b-34cd-95bfef2286eb" [ 2751.533260] env[61964]: _type = "Task" [ 2751.533260] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2751.542180] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525598df-9a20-0f0b-34cd-95bfef2286eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2751.599069] env[61964]: DEBUG nova.network.neutron [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2751.664693] env[61964]: DEBUG nova.network.neutron [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2751.673911] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Releasing lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2751.674378] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2751.674572] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2751.675680] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36b62fd-cbcd-4fa7-8ce2-58b61c2131bf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.683253] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2751.683478] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df071990-69f7-4c4e-8160-159cb4d8f8bf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.716118] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2751.716331] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2751.716508] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Deleting the datastore file [datastore1] e6b8146e-6413-4c9b-81b3-07ef6c8719f5 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2751.716754] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-177d49d3-3efe-4d39-91f4-26f77a82c720 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2751.722847] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Waiting for the task: (returnval){ [ 2751.722847] env[61964]: value = "task-1688774" [ 2751.722847] env[61964]: _type = "Task" [ 2751.722847] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2751.730545] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Task: {'id': task-1688774, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2752.044296] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2752.044588] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating directory with path [datastore1] vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2752.044786] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b3b0e55-d2ab-48ce-961b-a816a3159962 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.055694] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created directory with path [datastore1] vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2752.055883] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Fetch image to [datastore1] vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2752.056076] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2752.056809] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ade134-3c53-41ab-89b4-73da9c4b8602 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.063249] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a48c8c-14a3-4f9f-858e-96d175127435 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.072011] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4ff837-95d2-4818-a0db-f825f7db8ac1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.102872] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6dbb1ecd-4f1c-4732-bac1-3344faacdd4b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.108022] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-425b73ca-3696-48f6-bebf-93c1d1ffa300 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.127797] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2752.175887] env[61964]: DEBUG oslo_vmware.rw_handles [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2752.237221] env[61964]: DEBUG oslo_vmware.rw_handles [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2752.237441] env[61964]: DEBUG oslo_vmware.rw_handles [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2752.241175] env[61964]: DEBUG oslo_vmware.api [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Task: {'id': task-1688774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034698} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2752.241413] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2752.241611] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2752.241795] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2752.241973] env[61964]: INFO nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Took 0.57 seconds to destroy the instance on the hypervisor. [ 2752.242245] env[61964]: DEBUG oslo.service.loopingcall [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2752.242445] env[61964]: DEBUG nova.compute.manager [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 2752.244609] env[61964]: DEBUG nova.compute.claims [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2752.244823] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2752.245035] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2752.403414] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9186088c-4118-400c-9bc1-ada13bb05c12 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.410628] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a690ed6-b252-44a4-bb6e-71d9af2222b0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.439850] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c067ecc-591d-4ab0-b45f-53c0920920eb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.446641] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231a8922-d180-4204-af73-e94c5f2d86e5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.459014] env[61964]: DEBUG nova.compute.provider_tree [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2752.467336] env[61964]: DEBUG nova.scheduler.client.report [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2752.481061] env[61964]: DEBUG oslo_concurrency.lockutils [None 
req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.236s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2752.481594] env[61964]: ERROR nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2752.481594] env[61964]: Faults: ['InvalidArgument'] [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Traceback (most recent call last): [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self.driver.spawn(context, instance, image_meta, [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self._fetch_image_if_missing(context, vi) [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] image_cache(vi, tmp_image_ds_loc) [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] vm_util.copy_virtual_disk( [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] session._wait_for_task(vmdk_copy_task) [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] return self.wait_for_task(task_ref) [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] 
return evt.wait() [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] result = hub.switch() [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] return self.greenlet.switch() [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] self.f(*self.args, **self.kw) [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] raise exceptions.translate_fault(task_info.error) [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Faults: ['InvalidArgument'] [ 2752.481594] env[61964]: ERROR nova.compute.manager [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] [ 2752.482602] env[61964]: DEBUG nova.compute.utils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2752.483658] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Build of instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 was re-scheduled: A specified parameter was not correct: fileType [ 2752.483658] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2752.484038] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2752.484262] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2752.484407] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 
tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquired lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2752.484563] env[61964]: DEBUG nova.network.neutron [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2752.510146] env[61964]: DEBUG nova.network.neutron [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2752.582195] env[61964]: DEBUG nova.network.neutron [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2752.590477] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Releasing lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2752.590693] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2752.590870] env[61964]: DEBUG nova.compute.manager [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 2752.677925] env[61964]: INFO nova.scheduler.client.report [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Deleted allocations for instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 [ 2752.696745] env[61964]: DEBUG oslo_concurrency.lockutils [None req-372f1f62-4ced-4016-993b-89b8e5be6aa4 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 527.507s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2752.697018] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 332.112s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2752.697254] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2752.697467] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2752.697630] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2752.699544] env[61964]: INFO nova.compute.manager [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Terminating instance [ 2752.701338] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Acquiring lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2752.701501] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] 
Acquired lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2752.701641] env[61964]: DEBUG nova.network.neutron [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2752.727928] env[61964]: DEBUG nova.network.neutron [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2752.790126] env[61964]: DEBUG nova.network.neutron [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2752.799306] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Releasing lock "refresh_cache-e6b8146e-6413-4c9b-81b3-07ef6c8719f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2752.799711] env[61964]: DEBUG nova.compute.manager [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2752.799900] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2752.800420] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-704fdaa7-130e-40cc-affd-d27bd29660b1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.809377] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479f69cb-6224-4ce0-aaa2-c3e12afb2123 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2752.836783] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6b8146e-6413-4c9b-81b3-07ef6c8719f5 could not be found. 
[ 2752.836874] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2752.837149] env[61964]: INFO nova.compute.manager [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2752.837404] env[61964]: DEBUG oslo.service.loopingcall [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2752.837653] env[61964]: DEBUG nova.compute.manager [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2752.837750] env[61964]: DEBUG nova.network.neutron [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2752.857509] env[61964]: DEBUG nova.network.neutron [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2752.865685] env[61964]: DEBUG nova.network.neutron [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2752.873766] env[61964]: INFO nova.compute.manager [-] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] Took 0.04 seconds to deallocate network for instance. [ 2752.960753] env[61964]: DEBUG oslo_concurrency.lockutils [None req-593a1f4f-d0d6-4cfa-9643-8fd5f9ed8da9 tempest-ServersAaction247Test-1483190775 tempest-ServersAaction247Test-1483190775-project-member] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.264s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2752.961945] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 107.370s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2752.961945] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e6b8146e-6413-4c9b-81b3-07ef6c8719f5] During sync_power_state the instance has a pending task (deleting). Skip. 
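The lockutils messages throughout this section follow a fixed shape: an "Acquiring lock" line, an "acquired ... waited N s" line, and a "released ... held N s" line, keyed by a lock name and the qualified name of the caller. A rough stdlib-only approximation of that named-lock bookkeeping is sketched below, assuming a simple in-process registry; timed_lock and _locks are hypothetical helpers, not the oslo.concurrency implementation.

import contextlib
import logging
import threading
import time

_locks = {}                        # hypothetical in-process registry of named locks
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, caller, log):
    """Acquire a named lock and report how long the caller waited for it and
    how long it was held, in the spirit of the 'waited'/'held' lines above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    log.debug('Acquiring lock "%s" by "%s"', name, caller)
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    log.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
              name, caller, acquired - start)
    try:
        yield
    finally:
        lock.release()
        log.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  name, caller, time.monotonic() - acquired)

logging.basicConfig(level=logging.DEBUG)
with timed_lock("compute_resources", "example.update_available_resource",
                logging.getLogger("sketch")):
    time.sleep(0.01)               # simulated critical section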
[ 2752.962125] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "e6b8146e-6413-4c9b-81b3-07ef6c8719f5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2769.445423] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "3352530a-f799-4a76-9e18-86ab1bd96d2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2769.445733] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2769.456819] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2769.509864] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2769.510139] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2769.511685] env[61964]: INFO nova.compute.claims [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2769.704464] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bd3dc9-786e-4b27-a872-9dcfa570dc45 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.712671] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79229b6-2c3f-4d97-8c38-0722efce5847 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.741632] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8aa5246-9403-4bbf-a647-fad7c7bfcc14 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.748558] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6034ff-d377-4c2c-b41b-25ac9f7017b8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.761517] env[61964]: DEBUG nova.compute.provider_tree [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2769.770315] env[61964]: DEBUG nova.scheduler.client.report [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2769.791611] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2769.792145] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2769.827563] env[61964]: DEBUG nova.compute.utils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2769.828839] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2769.829013] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2769.837581] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2769.884461] env[61964]: DEBUG nova.policy [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7981865f3f78481596c8f583e60a33bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '79737fa866ad4f68868ef82bb0f42b18', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2769.900020] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2769.924300] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2769.924600] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2769.924797] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2769.925046] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2769.925198] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2769.925377] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 
tempest-ServersTestJSON-1034672809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2769.925630] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2769.925835] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2769.926044] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2769.926218] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2769.926395] env[61964]: DEBUG nova.virt.hardware [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2769.927304] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485f8d94-93b9-41b3-b47b-829e65cbacb7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2769.934843] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0b231e-e953-4e7f-983b-f7de589f9879 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2770.457668] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Successfully created port: 980028f4-80b0-4576-bf12-c9d153a6566a {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2771.041216] env[61964]: DEBUG nova.compute.manager [req-8c7c0269-7cc9-4b01-9b37-7e8c30d08d6f req-2bcd9652-3c3f-4017-8cd6-674ea3e257eb service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Received event network-vif-plugged-980028f4-80b0-4576-bf12-c9d153a6566a {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2771.041451] env[61964]: DEBUG oslo_concurrency.lockutils [req-8c7c0269-7cc9-4b01-9b37-7e8c30d08d6f req-2bcd9652-3c3f-4017-8cd6-674ea3e257eb service nova] Acquiring lock "3352530a-f799-4a76-9e18-86ab1bd96d2e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2771.042273] env[61964]: DEBUG oslo_concurrency.lockutils [req-8c7c0269-7cc9-4b01-9b37-7e8c30d08d6f req-2bcd9652-3c3f-4017-8cd6-674ea3e257eb service nova] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2771.042499] env[61964]: DEBUG oslo_concurrency.lockutils [req-8c7c0269-7cc9-4b01-9b37-7e8c30d08d6f req-2bcd9652-3c3f-4017-8cd6-674ea3e257eb service nova] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2771.042654] env[61964]: DEBUG nova.compute.manager [req-8c7c0269-7cc9-4b01-9b37-7e8c30d08d6f req-2bcd9652-3c3f-4017-8cd6-674ea3e257eb service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] No waiting events found dispatching network-vif-plugged-980028f4-80b0-4576-bf12-c9d153a6566a {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2771.042820] env[61964]: WARNING nova.compute.manager [req-8c7c0269-7cc9-4b01-9b37-7e8c30d08d6f req-2bcd9652-3c3f-4017-8cd6-674ea3e257eb service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Received unexpected event network-vif-plugged-980028f4-80b0-4576-bf12-c9d153a6566a for instance with vm_state building and task_state spawning. [ 2771.089750] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Successfully updated port: 980028f4-80b0-4576-bf12-c9d153a6566a {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2771.102050] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "refresh_cache-3352530a-f799-4a76-9e18-86ab1bd96d2e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2771.102205] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquired lock "refresh_cache-3352530a-f799-4a76-9e18-86ab1bd96d2e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2771.102354] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2771.138732] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2771.347357] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Updating instance_info_cache with network_info: [{"id": "980028f4-80b0-4576-bf12-c9d153a6566a", "address": "fa:16:3e:0a:3e:46", "network": {"id": "6740fc39-0de3-4216-bc50-687cc8326809", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060886478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79737fa866ad4f68868ef82bb0f42b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee43879-c0b2-47f7-80d0-2c86e3d6d8b5", "external-id": "nsx-vlan-transportzone-151", "segmentation_id": 151, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap980028f4-80", "ovs_interfaceid": "980028f4-80b0-4576-bf12-c9d153a6566a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2771.359579] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Releasing lock "refresh_cache-3352530a-f799-4a76-9e18-86ab1bd96d2e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2771.359881] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Instance network_info: |[{"id": "980028f4-80b0-4576-bf12-c9d153a6566a", "address": "fa:16:3e:0a:3e:46", "network": {"id": "6740fc39-0de3-4216-bc50-687cc8326809", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060886478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79737fa866ad4f68868ef82bb0f42b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee43879-c0b2-47f7-80d0-2c86e3d6d8b5", "external-id": "nsx-vlan-transportzone-151", "segmentation_id": 151, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap980028f4-80", "ovs_interfaceid": "980028f4-80b0-4576-bf12-c9d153a6566a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2771.360311] env[61964]: 
DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:3e:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ee43879-c0b2-47f7-80d0-2c86e3d6d8b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '980028f4-80b0-4576-bf12-c9d153a6566a', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2771.368051] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Creating folder: Project (79737fa866ad4f68868ef82bb0f42b18). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2771.368600] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e58b0d49-b292-436e-b4c3-0c02a03856b3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2771.383563] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Created folder: Project (79737fa866ad4f68868ef82bb0f42b18) in parent group-v351942. [ 2771.383752] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Creating folder: Instances. Parent ref: group-v352052. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2771.384226] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1997deed-5c89-4b79-9e97-d9569cb1951e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2771.395221] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Created folder: Instances in parent group-v352052. [ 2771.395448] env[61964]: DEBUG oslo.service.loopingcall [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2771.395628] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2771.395824] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fe9a845-e94b-4aa7-925a-3987b702e5ba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2771.414979] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2771.414979] env[61964]: value = "task-1688777" [ 2771.414979] env[61964]: _type = "Task" [ 2771.414979] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2771.422600] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688777, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2771.925441] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688777, 'name': CreateVM_Task, 'duration_secs': 0.281999} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2771.925687] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2771.926406] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2771.926607] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2771.926980] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2771.927279] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-384f4cfd-1ad9-43ed-b63b-39d51adae9e6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2771.931862] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Waiting for the task: (returnval){ [ 2771.931862] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52eec35b-a8e2-e4bf-e408-6aa2afddcdbe" [ 2771.931862] env[61964]: _type = "Task" [ 2771.931862] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2771.945101] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52eec35b-a8e2-e4bf-e408-6aa2afddcdbe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2772.442668] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2772.442965] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2772.443207] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2773.067933] env[61964]: DEBUG nova.compute.manager [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Received event network-changed-980028f4-80b0-4576-bf12-c9d153a6566a {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2773.068217] env[61964]: DEBUG nova.compute.manager [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Refreshing instance network info cache due to event network-changed-980028f4-80b0-4576-bf12-c9d153a6566a. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2773.068414] env[61964]: DEBUG oslo_concurrency.lockutils [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] Acquiring lock "refresh_cache-3352530a-f799-4a76-9e18-86ab1bd96d2e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2773.068523] env[61964]: DEBUG oslo_concurrency.lockutils [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] Acquired lock "refresh_cache-3352530a-f799-4a76-9e18-86ab1bd96d2e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2773.068674] env[61964]: DEBUG nova.network.neutron [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Refreshing network info cache for port 980028f4-80b0-4576-bf12-c9d153a6566a {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2773.342873] env[61964]: DEBUG nova.network.neutron [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Updated VIF entry in instance network info cache for port 980028f4-80b0-4576-bf12-c9d153a6566a. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2773.343240] env[61964]: DEBUG nova.network.neutron [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Updating instance_info_cache with network_info: [{"id": "980028f4-80b0-4576-bf12-c9d153a6566a", "address": "fa:16:3e:0a:3e:46", "network": {"id": "6740fc39-0de3-4216-bc50-687cc8326809", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060886478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79737fa866ad4f68868ef82bb0f42b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ee43879-c0b2-47f7-80d0-2c86e3d6d8b5", "external-id": "nsx-vlan-transportzone-151", "segmentation_id": 151, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap980028f4-80", "ovs_interfaceid": "980028f4-80b0-4576-bf12-c9d153a6566a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2773.352387] env[61964]: DEBUG oslo_concurrency.lockutils [req-ba7afa63-a545-4c20-9978-b059dae01f2f req-fd46267b-d170-4674-aaae-daaa089bf7ff service nova] Releasing lock "refresh_cache-3352530a-f799-4a76-9e18-86ab1bd96d2e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2777.383660] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.384329] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.384801] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2780.384801] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2780.413390] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.413571] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.413668] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.413764] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.413891] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.414010] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.414132] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.414253] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.414374] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.414491] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2780.414608] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2783.384283] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2783.384675] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2785.384627] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2786.383709] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2787.384421] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2791.378714] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2792.384458] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2793.383632] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2793.396540] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2793.396540] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2793.396540] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2793.396540] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2793.397721] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16fbdd96-f7c1-4858-9f09-c6369c3cb8cf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.406886] env[61964]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57f89bd-96d5-4d5f-8bd0-b9dc48d9aaee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.421029] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11228a10-d435-4ded-8927-2d6f33e8e25f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.427392] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1a0b88-4396-42d5-a7dc-72446c9c4852 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.457267] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2793.457420] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2793.457614] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2793.546576] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 031156ba-251e-4b8b-86bd-9c967adc808f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.546749] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.546878] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547007] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547146] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547263] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547404] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547523] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547643] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547754] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2793.547935] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2793.548090] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2793.666231] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0db48b5-62a0-47f7-8f3a-d67b3c8ddc1f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.673812] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ec7ab3-319f-4a9e-8c50-dda12d993553 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.702180] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847b9619-b385-4e35-a85d-d7bddb11ecb7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.708928] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf71abe-6226-4ae7-828b-37a513b2ef16 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2793.723547] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2793.732286] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2793.745774] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2793.745960] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.288s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2797.395431] env[61964]: WARNING oslo_vmware.rw_handles [None 
req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2797.395431] env[61964]: ERROR oslo_vmware.rw_handles [ 2797.395883] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2797.398165] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2797.398534] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Copying Virtual Disk [datastore1] vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/e12e96fb-c8c2-4f5f-a053-ace2fdeeae11/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2797.398813] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-208a187d-627c-4af9-8b65-2c8e5d586337 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2797.407257] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 2797.407257] env[61964]: value = "task-1688778" [ 2797.407257] env[61964]: _type = "Task" [ 2797.407257] 
env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2797.415634] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': task-1688778, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2797.918247] env[61964]: DEBUG oslo_vmware.exceptions [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2797.918560] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2797.919118] env[61964]: ERROR nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2797.919118] env[61964]: Faults: ['InvalidArgument'] [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Traceback (most recent call last): [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] yield resources [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self.driver.spawn(context, instance, image_meta, [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self._fetch_image_if_missing(context, vi) [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] image_cache(vi, tmp_image_ds_loc) [ 2797.919118] env[61964]: ERROR 
nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] vm_util.copy_virtual_disk( [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] session._wait_for_task(vmdk_copy_task) [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] return self.wait_for_task(task_ref) [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] return evt.wait() [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] result = hub.switch() [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] return self.greenlet.switch() [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self.f(*self.args, **self.kw) [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] raise exceptions.translate_fault(task_info.error) [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Faults: ['InvalidArgument'] [ 2797.919118] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] [ 2797.919781] env[61964]: INFO nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Terminating instance [ 2797.920905] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2797.921127] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2797.921360] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0047766-4fd9-4393-a876-0b6d1934a2c4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2797.923436] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2797.923626] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2797.924342] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2169a8-ee17-4fa2-b2c6-4052df24b32e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2797.931339] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2797.932393] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-152e34bb-0f53-4570-bef1-5c8b034b4106 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2797.933740] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2797.933910] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2797.934573] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cda21be3-6771-48ff-873a-6da750cf55b5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2797.940043] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 2797.940043] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5a480-f1e1-ee57-0aac-a1713a295e67" [ 2797.940043] env[61964]: _type = "Task" [ 2797.940043] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2797.947297] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5a480-f1e1-ee57-0aac-a1713a295e67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2798.450598] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2798.450919] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating directory with path [datastore1] vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2798.451080] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76fca5b0-d23c-4e26-b5ce-6de8e988ab50 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2798.471906] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Created directory with path [datastore1] vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2798.472125] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Fetch image to [datastore1] vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2798.472296] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] 
vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2798.473094] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861e187f-f2d4-4001-8b00-9fd735832ab5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2798.476553] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2798.476744] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2798.476910] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Deleting the datastore file [datastore1] 031156ba-251e-4b8b-86bd-9c967adc808f {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2798.477479] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65d63ce6-99a2-4fa7-9327-1534b43e053c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2798.481955] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2d4bff-0c4b-46ba-8f49-4b878cb89420 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2798.485934] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 2798.485934] env[61964]: value = "task-1688780" [ 2798.485934] env[61964]: _type = "Task" [ 2798.485934] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2798.493758] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af48ac9-0ab3-4b9b-9a4c-52cade0cf446 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2798.501885] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': task-1688780, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2798.527059] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db152ed-0f63-497c-86c2-ed19b1f16dcb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2798.532414] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fbca56cc-2879-4691-b803-656c8fa73007 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2798.552160] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2798.703157] env[61964]: DEBUG oslo_vmware.rw_handles [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2798.762594] env[61964]: DEBUG oslo_vmware.rw_handles [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2798.762794] env[61964]: DEBUG oslo_vmware.rw_handles [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2799.000060] env[61964]: DEBUG oslo_vmware.api [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': task-1688780, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076785} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2799.000350] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2799.000539] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2799.000721] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2799.000901] env[61964]: INFO nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2799.003015] env[61964]: DEBUG nova.compute.claims [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2799.003220] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2799.003413] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2799.160607] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1e2f3c-67d5-4d0f-bfa0-f5d80d633b5a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.167933] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c72f76-7af4-48ad-8ae4-297add302c92 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.197244] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b79245-79d9-4d35-b17c-a23e2c9bf451 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.204373] env[61964]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9115c85-2d45-409d-b5eb-0cb15f9e9737 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.217228] env[61964]: DEBUG nova.compute.provider_tree [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2799.225481] env[61964]: DEBUG nova.scheduler.client.report [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2799.239877] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.236s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2799.240404] env[61964]: ERROR nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2799.240404] env[61964]: Faults: ['InvalidArgument'] [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Traceback (most recent call last): [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self.driver.spawn(context, instance, image_meta, [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self._fetch_image_if_missing(context, vi) [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] image_cache(vi, tmp_image_ds_loc) [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] vm_util.copy_virtual_disk( [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] session._wait_for_task(vmdk_copy_task) [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] return self.wait_for_task(task_ref) [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] return evt.wait() [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] result = hub.switch() [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] return self.greenlet.switch() [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] self.f(*self.args, **self.kw) [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] raise exceptions.translate_fault(task_info.error) [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Faults: ['InvalidArgument'] [ 2799.240404] env[61964]: ERROR nova.compute.manager [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] [ 2799.241198] env[61964]: DEBUG nova.compute.utils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] VimFaultException {{(pid=61964) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2799.242481] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Build of instance 031156ba-251e-4b8b-86bd-9c967adc808f was re-scheduled: A specified parameter was not correct: fileType [ 2799.242481] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2799.242838] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2799.243015] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2799.243193] env[61964]: DEBUG nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2799.243355] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2799.594105] env[61964]: DEBUG nova.network.neutron [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2799.605188] env[61964]: INFO nova.compute.manager [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Took 0.36 seconds to deallocate network for instance. 
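The traceback above ends in oslo.vmware's task-polling path: wait_for_task blocks on an event while _poll_task periodically reads the vCenter task state, and a failed task is re-raised as a Python exception via exceptions.translate_fault. That is how the vCenter "A specified parameter was not correct: fileType" / InvalidArgument fault surfaces into Nova's _build_and_run_instance and triggers the reschedule logged here. Below is a minimal, self-contained sketch of that poll-and-translate pattern; it is not oslo.vmware's actual code, and the names used (VimFaultException stand-in, translate_fault, poll_task, get_task_info) are illustrative only.

# Minimal sketch (not oslo.vmware itself) of the poll-and-translate pattern the
# traceback above walks through: a task is polled until it reaches a terminal
# state, and a failed task's fault is re-raised as a Python exception.
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def translate_fault(task_error):
    # oslo.vmware maps vCenter fault names to exception classes; this sketch
    # just wraps everything in the generic VimFaultException, as seen above.
    return VimFaultException(task_error["faults"], task_error["message"])


def poll_task(get_task_info, interval=0.5):
    """Poll a task until it succeeds, or raise the translated fault."""
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise translate_fault(info["error"])
        time.sleep(interval)


if __name__ == "__main__":
    # Simulate the CopyVirtualDisk_Task failure recorded in the log.
    states = iter([
        {"state": "running"},
        {"state": "error",
         "error": {"faults": ["InvalidArgument"],
                   "message": "A specified parameter was not correct: fileType"}},
    ])
    try:
        poll_task(lambda: next(states), interval=0.01)
    except VimFaultException as exc:
        print(f"task failed: {exc} (faults={exc.fault_list})")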
[ 2799.709693] env[61964]: INFO nova.scheduler.client.report [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Deleted allocations for instance 031156ba-251e-4b8b-86bd-9c967adc808f [ 2799.739019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-96aba5ed-3fd5-4238-8a4f-617c0c8a5a61 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "031156ba-251e-4b8b-86bd-9c967adc808f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 546.966s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2799.739019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "031156ba-251e-4b8b-86bd-9c967adc808f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 350.706s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2799.739019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "031156ba-251e-4b8b-86bd-9c967adc808f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2799.739019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "031156ba-251e-4b8b-86bd-9c967adc808f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2799.739019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "031156ba-251e-4b8b-86bd-9c967adc808f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2799.740645] env[61964]: INFO nova.compute.manager [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Terminating instance [ 2799.742671] env[61964]: DEBUG nova.compute.manager [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2799.743039] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2799.743777] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2386519-ff8e-4402-ba7a-478a1711a482 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.755848] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d702cc8-ae21-4c20-9bf4-a6eba3001fa1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2799.785028] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 031156ba-251e-4b8b-86bd-9c967adc808f could not be found. [ 2799.785158] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2799.785349] env[61964]: INFO nova.compute.manager [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2799.785596] env[61964]: DEBUG oslo.service.loopingcall [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2799.785835] env[61964]: DEBUG nova.compute.manager [-] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2799.785928] env[61964]: DEBUG nova.network.neutron [-] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2799.808603] env[61964]: DEBUG nova.network.neutron [-] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2799.816263] env[61964]: INFO nova.compute.manager [-] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] Took 0.03 seconds to deallocate network for instance. 
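In the second terminate of 031156ba-251e-4b8b-86bd-9c967adc808f above, the backend lookup (SearchIndex.FindAllByUuid) comes back empty, the driver logs InstanceNotFound only as a warning, and the manager still runs the rest of the teardown (network deallocation) instead of failing the terminate. A rough sketch of that "missing on backend counts as destroyed" shape follows; the names (destroy_instance, lookup_vm and the callbacks) are invented for illustration, and the sketch compresses logic that is split across vmops.destroy and the compute manager in the real code.

# Minimal sketch (invented names) of the teardown shape in the log: if the
# backend says the VM is already gone, log it and keep going so network and
# resource cleanup still run instead of aborting the whole terminate.
import logging

LOG = logging.getLogger("destroy-sketch")


class InstanceNotFound(Exception):
    pass


def destroy_instance(lookup_vm, unregister, delete_files, deallocate_network, uuid):
    try:
        vm_ref = lookup_vm(uuid)           # SearchIndex.FindAllByUuid in the log
        unregister(vm_ref)                 # VirtualMachine.UnregisterVM
        delete_files(uuid)                 # FileManager.DeleteDatastoreFile_Task
    except InstanceNotFound:
        LOG.warning("Instance %s does not exist on backend, continuing", uuid)
    finally:
        deallocate_network(uuid)           # always runs, as in the log


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    def missing(_uuid):
        raise InstanceNotFound()

    destroy_instance(
        lookup_vm=missing,
        unregister=lambda ref: None,
        delete_files=lambda u: None,
        deallocate_network=lambda u: LOG.info("deallocated network for %s", u),
        uuid="031156ba-251e-4b8b-86bd-9c967adc808f",
    )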
[ 2799.899250] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d307e203-3828-4007-bc9c-40022ddbe177 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "031156ba-251e-4b8b-86bd-9c967adc808f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.162s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2799.900064] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "031156ba-251e-4b8b-86bd-9c967adc808f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 154.308s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2799.900249] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 031156ba-251e-4b8b-86bd-9c967adc808f] During sync_power_state the instance has a pending task (deleting). Skip. [ 2799.900423] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "031156ba-251e-4b8b-86bd-9c967adc808f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2839.747360] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2841.384653] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2841.384940] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2841.384940] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2841.404273] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.404409] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.404533] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.404659] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.404781] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.404900] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.405027] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.405151] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.405272] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2841.405394] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2841.574308] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2845.383693] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2845.384088] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2845.384170] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2846.384624] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2848.043687] env[61964]: WARNING oslo_vmware.rw_handles [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2848.043687] env[61964]: ERROR oslo_vmware.rw_handles [ 2848.044417] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2848.046127] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2848.046397] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Copying Virtual Disk [datastore1] vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/b5e25236-9be4-465c-8edf-4b385d274dc5/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2848.046690] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebb5cf66-ac80-43b4-9af5-a9f3947ec07c {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2848.054473] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 2848.054473] env[61964]: value = "task-1688781" [ 2848.054473] env[61964]: _type = "Task" [ 2848.054473] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2848.063442] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': task-1688781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2848.383422] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2848.566130] env[61964]: DEBUG oslo_vmware.exceptions [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2848.566399] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2848.566935] env[61964]: ERROR nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2848.566935] env[61964]: Faults: ['InvalidArgument'] [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Traceback (most recent call last): [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] yield resources [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self.driver.spawn(context, instance, image_meta, [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 
1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self._fetch_image_if_missing(context, vi) [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] image_cache(vi, tmp_image_ds_loc) [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] vm_util.copy_virtual_disk( [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] session._wait_for_task(vmdk_copy_task) [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] return self.wait_for_task(task_ref) [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] return evt.wait() [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] result = hub.switch() [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] return self.greenlet.switch() [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self.f(*self.args, **self.kw) [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] raise exceptions.translate_fault(task_info.error) [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Faults: 
['InvalidArgument'] [ 2848.566935] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] [ 2848.567926] env[61964]: INFO nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Terminating instance [ 2848.568799] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2848.569028] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2848.569276] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af445f9b-3d75-4802-ba2a-535762d36209 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2848.571402] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2848.571592] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2848.572308] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3726b8-c2fb-4046-b18b-993c6e1cacd3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2848.579951] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2848.580167] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8c5da01-f812-405a-a561-08690da3b63d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2848.582174] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2848.582348] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2848.583287] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebdc5540-9150-472f-a60d-f768ca28271a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2848.587658] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 2848.587658] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52806620-b59e-ddc2-0203-5c9bbdec6263" [ 2848.587658] env[61964]: _type = "Task" [ 2848.587658] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2848.594430] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52806620-b59e-ddc2-0203-5c9bbdec6263, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2848.654097] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2848.654364] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2848.654547] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Deleting the datastore file [datastore1] 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2848.654800] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9f9cc09-ee11-4299-b947-292d289f4b26 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2848.660409] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for the task: (returnval){ [ 2848.660409] env[61964]: value = "task-1688783" [ 2848.660409] env[61964]: _type = "Task" [ 2848.660409] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2848.667601] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': task-1688783, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2849.098676] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2849.098676] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating directory with path [datastore1] vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2849.099043] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdcb978e-075f-45ab-a272-a40209f826ac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.110131] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Created directory with path [datastore1] vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2849.110350] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Fetch image to [datastore1] vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2849.110529] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2849.111241] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99be35d-9225-4065-b4e6-594b3524c525 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.117255] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8965e74e-b3b7-4cd8-a23e-e79ce8491a62 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.125902] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b224a3a5-9e79-4de6-b2d9-6537c815e99e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.154963] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b34c04-b401-420d-95f7-76ee805c89a3 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.160141] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aa9064c7-c7d2-4810-8912-4417a21b5523 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.169629] env[61964]: DEBUG oslo_vmware.api [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Task: {'id': task-1688783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072626} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2849.169850] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2849.170059] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2849.170264] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2849.170450] env[61964]: INFO nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Took 0.60 seconds to destroy the instance on the hypervisor. 
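The entries above follow the usual oslo.vmware task lifecycle: FileManager.DeleteDatastoreFile_Task is invoked, the API layer waits on the returned task reference, polls it ("progress is 0%"), and finally reports completion with a duration_secs value. A minimal sketch of that polling pattern, with a hypothetical get_task_info callable and TaskFailed exception standing in for the real PropertyCollector read and the VimFaultException translation:

```python
# Sketch of the task-polling pattern seen in the log (hypothetical helpers;
# oslo.vmware's real wait_for_task drives this with an eventlet loopingcall
# rather than time.sleep).
import time


class TaskFailed(Exception):
    """Raised when the vCenter task reports an error state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task reference until it succeeds or fails.

    get_task_info is a hypothetical callable returning an object with
    .state ('running', 'success' or 'error'), .progress and .error fields.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info                   # duration ends up in the completion log line
        if info.state == "error":
            raise TaskFailed(info.error)  # translated to a Vim fault upstream
        # the "progress is 0%" entries above come from exactly this kind of poll
        time.sleep(poll_interval)
```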
[ 2849.172443] env[61964]: DEBUG nova.compute.claims [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2849.172604] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2849.172815] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2849.184645] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2849.239093] env[61964]: DEBUG oslo_vmware.rw_handles [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2849.299534] env[61964]: DEBUG oslo_vmware.rw_handles [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2849.299733] env[61964]: DEBUG oslo_vmware.rw_handles [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2849.395025] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203f640d-dee2-4d76-8e01-aee476f62e31 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.401103] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9f9b00-cafe-4185-b85f-c084c67a35c3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.430476] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5511be2a-1cc6-44c6-850b-3abcd9e5f680 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.437426] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db6c5cc-c127-4010-966a-c6c319d633d6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.450059] env[61964]: DEBUG nova.compute.provider_tree [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2849.458026] env[61964]: DEBUG nova.scheduler.client.report [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2849.471335] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2849.471845] env[61964]: ERROR nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2849.471845] env[61964]: Faults: ['InvalidArgument'] [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Traceback (most recent call last): [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 
1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self.driver.spawn(context, instance, image_meta, [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self._fetch_image_if_missing(context, vi) [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] image_cache(vi, tmp_image_ds_loc) [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] vm_util.copy_virtual_disk( [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] session._wait_for_task(vmdk_copy_task) [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] return self.wait_for_task(task_ref) [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] return evt.wait() [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] result = hub.switch() [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] return self.greenlet.switch() [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] self.f(*self.args, **self.kw) [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] raise exceptions.translate_fault(task_info.error) [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Faults: ['InvalidArgument'] [ 2849.471845] env[61964]: ERROR nova.compute.manager [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] [ 2849.472668] env[61964]: DEBUG nova.compute.utils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2849.474025] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Build of instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd was re-scheduled: A specified parameter was not correct: fileType [ 2849.474025] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2849.474398] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2849.474571] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2849.474738] env[61964]: DEBUG nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2849.474902] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2849.778413] env[61964]: DEBUG nova.network.neutron [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2849.796823] env[61964]: INFO nova.compute.manager [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Took 0.32 seconds to deallocate network for instance. [ 2849.893374] env[61964]: INFO nova.scheduler.client.report [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Deleted allocations for instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd [ 2849.915698] env[61964]: DEBUG oslo_concurrency.lockutils [None req-83b88ed1-81dd-4b18-a574-fcbe5fad6864 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 590.618s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2849.915991] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 395.015s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2849.916330] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Acquiring lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2849.916574] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2849.916753] env[61964]: 
DEBUG oslo_concurrency.lockutils [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2849.918928] env[61964]: INFO nova.compute.manager [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Terminating instance [ 2849.921634] env[61964]: DEBUG nova.compute.manager [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2849.921634] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2849.921784] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e2de6a1-6cae-4410-9341-057861eda8cc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.932415] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e39545-b0e7-47b7-8196-cce0925f71fe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2849.961573] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd could not be found. [ 2849.961847] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2849.961847] env[61964]: INFO nova.compute.manager [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2849.962182] env[61964]: DEBUG oslo.service.loopingcall [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2849.962424] env[61964]: DEBUG nova.compute.manager [-] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2849.962424] env[61964]: DEBUG nova.network.neutron [-] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2849.988505] env[61964]: DEBUG nova.network.neutron [-] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2849.999114] env[61964]: INFO nova.compute.manager [-] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] Took 0.04 seconds to deallocate network for instance. [ 2850.126050] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b83494bd-663b-4e66-8114-028d676e14d8 tempest-ImagesTestJSON-1538433138 tempest-ImagesTestJSON-1538433138-project-member] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2850.126660] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 204.534s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2850.126850] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 1ed57534-9d5c-4a4c-8493-4f7df75d9ddd] During sync_power_state the instance has a pending task (deleting). Skip. 
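The lock messages in the preceding entries ("acquired by ... :: waited", "released ... :: held") are emitted by oslo.concurrency wrappers that serialize work on named locks such as "compute_resources" and the per-instance "<uuid>-events" lock. A minimal sketch of that usage, with an illustrative function body; the instance UUID is reused only as a lock name:

```python
# Sketch of the oslo.concurrency locking pattern behind the
# 'acquired ... waited' / 'released ... held' entries above.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def abort_instance_claim(instance_uuid):
    # Callers of this function are serialized on the "compute_resources" lock;
    # the wrapper records how long each caller waited for the lock and how long
    # it was held, which is what produces the timing values in the log.
    print("releasing resources for", instance_uuid)


# Per-instance event handling uses the same mechanism, keyed by '<uuid>-events'.
with lockutils.lock("1ed57534-9d5c-4a4c-8493-4f7df75d9ddd-events"):
    pass

abort_instance_claim("1ed57534-9d5c-4a4c-8493-4f7df75d9ddd")
```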
[ 2850.127036] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "1ed57534-9d5c-4a4c-8493-4f7df75d9ddd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2851.378801] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2853.854695] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2854.070571] env[61964]: DEBUG oslo_concurrency.lockutils [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2854.383732] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2854.383977] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2854.395298] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2854.395513] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2854.395678] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2854.395839] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2854.397287] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f9f8b2-6c62-415c-92ad-52cd9c560acb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.405186] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f52862-c7aa-426a-9f3b-5b863582c0e1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.420329] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc42f5f-19d2-4905-8087-bb87032cdef3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.426472] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc5ee54-5436-4a36-97e3-94ea1d68ed2b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.454976] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181356MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2854.455162] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2854.455316] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2854.519180] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.519360] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.519486] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.519608] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.519726] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.519841] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.519955] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.520084] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2854.520265] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2854.520418] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2854.633857] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86758648-bdee-4311-a81b-1bdad561d923 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.641593] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad22103a-dd4d-4a22-b05f-3ef429b55cc2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.670957] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0b8ef0-3047-4a22-913f-a0a34f07968b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.677475] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd6c614-159c-4673-802b-faa664b8cd2c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.689937] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2854.698038] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2854.711667] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2854.711846] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.257s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2863.708775] env[61964]: DEBUG oslo_service.periodic_task [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2884.525521] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "a50141eb-d189-4970-9adc-10a25409b99a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2884.525840] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "a50141eb-d189-4970-9adc-10a25409b99a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2884.536417] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2884.592302] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2884.592548] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2884.594037] env[61964]: INFO nova.compute.claims [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2884.743566] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd75e41-3361-42a1-a080-602a40e91123 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2884.750639] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48341160-aca9-4049-9b1b-2ea8df830275 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2884.780991] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c1a71d-dc98-411b-9b91-a48eb932b2de {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2884.787916] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac57c751-9b7c-4464-8247-bbc06a60188a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2884.800448] env[61964]: DEBUG nova.compute.provider_tree [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2884.809920] env[61964]: DEBUG nova.scheduler.client.report [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2884.822223] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.230s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2884.822671] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2884.853421] env[61964]: DEBUG nova.compute.utils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2884.854711] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Allocating IP information in the background. 
{{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2884.854876] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2884.862174] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2884.924826] env[61964]: DEBUG nova.policy [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd85799edb52540a9841505de3c5da6f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59626e94ae3944c8863fe312dd97d9bb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 2884.936093] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2884.966855] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2884.967145] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2884.967304] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2884.967490] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2884.967637] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2884.967784] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2884.967996] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2884.968166] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2884.968365] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2884.968532] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2884.968704] env[61964]: DEBUG nova.virt.hardware [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2884.970103] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779c0edd-f27c-402e-80ca-385febad11ac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2884.980123] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3539cb-d747-408e-8070-33b8deab4af0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2885.265461] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Successfully created port: b0b35f25-6c4b-46f1-891e-85a921860926 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2885.827077] env[61964]: DEBUG nova.compute.manager [req-3768edf0-508f-4e1c-bbc8-8f2cfe48def3 req-ea9c8e27-ec1b-4140-b006-895f96de8d81 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Received event network-vif-plugged-b0b35f25-6c4b-46f1-891e-85a921860926 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2885.827325] env[61964]: DEBUG oslo_concurrency.lockutils [req-3768edf0-508f-4e1c-bbc8-8f2cfe48def3 req-ea9c8e27-ec1b-4140-b006-895f96de8d81 service nova] Acquiring lock "a50141eb-d189-4970-9adc-10a25409b99a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2885.827504] env[61964]: DEBUG oslo_concurrency.lockutils [req-3768edf0-508f-4e1c-bbc8-8f2cfe48def3 req-ea9c8e27-ec1b-4140-b006-895f96de8d81 service nova] Lock "a50141eb-d189-4970-9adc-10a25409b99a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2885.827672] env[61964]: DEBUG oslo_concurrency.lockutils [req-3768edf0-508f-4e1c-bbc8-8f2cfe48def3 req-ea9c8e27-ec1b-4140-b006-895f96de8d81 service nova] Lock "a50141eb-d189-4970-9adc-10a25409b99a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2885.827992] env[61964]: DEBUG nova.compute.manager [req-3768edf0-508f-4e1c-bbc8-8f2cfe48def3 req-ea9c8e27-ec1b-4140-b006-895f96de8d81 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] No waiting events found dispatching network-vif-plugged-b0b35f25-6c4b-46f1-891e-85a921860926 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2885.827992] env[61964]: WARNING nova.compute.manager [req-3768edf0-508f-4e1c-bbc8-8f2cfe48def3 req-ea9c8e27-ec1b-4140-b006-895f96de8d81 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Received unexpected event network-vif-plugged-b0b35f25-6c4b-46f1-891e-85a921860926 for instance with vm_state building and task_state spawning. [ 2885.904478] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Successfully updated port: b0b35f25-6c4b-46f1-891e-85a921860926 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2885.912272] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "refresh_cache-a50141eb-d189-4970-9adc-10a25409b99a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2885.912774] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired lock "refresh_cache-a50141eb-d189-4970-9adc-10a25409b99a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2885.912774] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2885.952745] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2886.137327] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Updating instance_info_cache with network_info: [{"id": "b0b35f25-6c4b-46f1-891e-85a921860926", "address": "fa:16:3e:31:8e:71", "network": {"id": "18784f4f-4f78-49de-bf0f-6241674cfdcd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1996452367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59626e94ae3944c8863fe312dd97d9bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b35f25-6c", "ovs_interfaceid": "b0b35f25-6c4b-46f1-891e-85a921860926", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2886.148347] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Releasing lock "refresh_cache-a50141eb-d189-4970-9adc-10a25409b99a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2886.148626] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Instance network_info: |[{"id": "b0b35f25-6c4b-46f1-891e-85a921860926", "address": "fa:16:3e:31:8e:71", "network": {"id": "18784f4f-4f78-49de-bf0f-6241674cfdcd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1996452367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59626e94ae3944c8863fe312dd97d9bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b35f25-6c", "ovs_interfaceid": "b0b35f25-6c4b-46f1-891e-85a921860926", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2886.149017] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:8e:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0b35f25-6c4b-46f1-891e-85a921860926', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2886.156552] env[61964]: DEBUG oslo.service.loopingcall [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2886.157024] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2886.157255] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ce5b6f1-9674-46f1-b9aa-291275b17157 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2886.177975] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2886.177975] env[61964]: value = "task-1688784" [ 2886.177975] env[61964]: _type = "Task" [ 2886.177975] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2886.186189] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688784, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2886.688681] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688784, 'name': CreateVM_Task, 'duration_secs': 0.283384} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2886.688852] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2886.689491] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2886.689658] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2886.689974] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2886.690228] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6efb204-5b0e-4190-b506-cff72ad0547b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2886.694383] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 2886.694383] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5288e7b4-09e1-a4b2-100d-4fc317c99460" [ 2886.694383] env[61964]: _type = "Task" [ 2886.694383] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2886.705220] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5288e7b4-09e1-a4b2-100d-4fc317c99460, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2887.205422] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2887.205780] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2887.205780] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2887.855934] env[61964]: DEBUG nova.compute.manager [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Received event network-changed-b0b35f25-6c4b-46f1-891e-85a921860926 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2887.855934] env[61964]: DEBUG nova.compute.manager [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Refreshing instance network info cache due to event network-changed-b0b35f25-6c4b-46f1-891e-85a921860926. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2887.855934] env[61964]: DEBUG oslo_concurrency.lockutils [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] Acquiring lock "refresh_cache-a50141eb-d189-4970-9adc-10a25409b99a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2887.855934] env[61964]: DEBUG oslo_concurrency.lockutils [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] Acquired lock "refresh_cache-a50141eb-d189-4970-9adc-10a25409b99a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2887.855934] env[61964]: DEBUG nova.network.neutron [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Refreshing network info cache for port b0b35f25-6c4b-46f1-891e-85a921860926 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2888.110451] env[61964]: DEBUG nova.network.neutron [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Updated VIF entry in instance network info cache for port b0b35f25-6c4b-46f1-891e-85a921860926. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2888.110943] env[61964]: DEBUG nova.network.neutron [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Updating instance_info_cache with network_info: [{"id": "b0b35f25-6c4b-46f1-891e-85a921860926", "address": "fa:16:3e:31:8e:71", "network": {"id": "18784f4f-4f78-49de-bf0f-6241674cfdcd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1996452367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59626e94ae3944c8863fe312dd97d9bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0b35f25-6c", "ovs_interfaceid": "b0b35f25-6c4b-46f1-891e-85a921860926", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2888.122835] env[61964]: DEBUG oslo_concurrency.lockutils [req-9c41a8c5-0fdf-48ed-b8b1-6035cc5074b4 req-1a06f8c0-0473-43e4-bc80-72670bf12ed4 service nova] Releasing lock "refresh_cache-a50141eb-d189-4970-9adc-10a25409b99a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2897.435821] env[61964]: WARNING oslo_vmware.rw_handles [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2897.435821] env[61964]: ERROR oslo_vmware.rw_handles [ 2897.436474] env[61964]: DEBUG nova.virt.vmwareapi.images [None 
req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2897.438575] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2897.438815] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Copying Virtual Disk [datastore1] vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/ed0dff78-9565-495c-968e-ea340fdc0736/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2897.439121] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d07b2906-1cb5-468c-9425-06c711bc9dc3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.446466] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 2897.446466] env[61964]: value = "task-1688785" [ 2897.446466] env[61964]: _type = "Task" [ 2897.446466] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2897.454081] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688785, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2897.957036] env[61964]: DEBUG oslo_vmware.exceptions [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2897.957261] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2897.957865] env[61964]: ERROR nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2897.957865] env[61964]: Faults: ['InvalidArgument'] [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Traceback (most recent call last): [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] yield resources [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self.driver.spawn(context, instance, image_meta, [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self._fetch_image_if_missing(context, vi) [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] image_cache(vi, tmp_image_ds_loc) [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] vm_util.copy_virtual_disk( [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] session._wait_for_task(vmdk_copy_task) [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] return self.wait_for_task(task_ref) [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] return evt.wait() [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] result = hub.switch() [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] return self.greenlet.switch() [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self.f(*self.args, **self.kw) [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] raise exceptions.translate_fault(task_info.error) [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Faults: ['InvalidArgument'] [ 2897.957865] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] [ 2897.958692] env[61964]: INFO nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Terminating instance [ 2897.959769] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2897.959979] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2897.960224] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f658b3be-4d7f-4983-b12b-ac70d61a2fbc 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.964517] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2897.964728] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2897.965462] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fa4f4c-a59a-4c77-8a4d-3bc017394636 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.969264] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2897.969429] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2897.970077] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dd889ad-908c-42da-a612-627c95b53740 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.973997] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2897.974483] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b69cae6e-f744-49b7-bd16-32c032ea5a34 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2897.976691] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Waiting for the task: (returnval){ [ 2897.976691] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52853413-d6e3-79be-ba7d-14af49523b72" [ 2897.976691] env[61964]: _type = "Task" [ 2897.976691] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2897.984182] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52853413-d6e3-79be-ba7d-14af49523b72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2898.044334] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2898.044649] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2898.044839] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Deleting the datastore file [datastore1] 7d1977c2-cc88-4964-989a-9258f345c4f2 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2898.045129] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e3798fb-fc05-483a-a072-0bc883620751 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.053559] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 2898.053559] env[61964]: value = "task-1688787" [ 2898.053559] env[61964]: _type = "Task" [ 2898.053559] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2898.059105] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688787, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2898.487979] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2898.488389] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Creating directory with path [datastore1] vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2898.488514] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcdc258c-d025-43df-a5cc-9b1ebad4fffd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.499733] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Created directory with path [datastore1] vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2898.499967] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Fetch image to [datastore1] vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2898.500217] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2898.500971] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb8d07a-765c-41ca-bb94-a7a6b8cc0a8a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.507387] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cf9046-da91-456b-9616-5a393f9d4300 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.516249] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82c59f6-5554-4fa2-95b3-7ecd6dd52f1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.550820] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5d2eb9a1-be81-4907-9268-f52be745dca5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.561789] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-81d0e69d-3af5-4170-af46-c4771ce5e3ff {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.563433] env[61964]: DEBUG oslo_vmware.api [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078761} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2898.563666] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2898.563842] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2898.564020] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2898.564191] env[61964]: INFO nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Took 0.60 seconds to destroy the instance on the hypervisor. 
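The entries above show the vmwareapi driver submitting DeleteDatastoreFile_Task, polling it ("progress is 0%"), and finally seeing it report completion with a duration_secs value. A minimal, self-contained sketch of that submit-then-poll pattern is below; it is illustrative only — wait_for_task(), fake_task_info(), the state names and PollTimeout are assumptions made for the sketch, not the oslo.vmware API.

# Sketch of the wait-and-poll pattern seen in the task entries above.
# The helper names and task states are assumptions for illustration,
# not the oslo.vmware implementation.
import itertools
import time


class PollTimeout(Exception):
    pass


def wait_for_task(poll_fn, interval=0.5, max_polls=120):
    """Poll poll_fn() until the task reports success or error."""
    for _ in range(max_polls):
        info = poll_fn()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)
    raise PollTimeout("task did not complete in time")


# Fake task that reports 'running' twice, then 'success', mimicking the
# "progress is 0%" polls followed by "completed successfully".
_states = itertools.chain(["running", "running"], itertools.repeat("success"))


def fake_task_info():
    return {"state": next(_states), "progress": 0}


if __name__ == "__main__":
    print(wait_for_task(fake_task_info, interval=0.01))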
[ 2898.566278] env[61964]: DEBUG nova.compute.claims [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2898.566467] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2898.566697] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2898.585818] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2898.640103] env[61964]: DEBUG oslo_vmware.rw_handles [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2898.699763] env[61964]: DEBUG oslo_vmware.rw_handles [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2898.699966] env[61964]: DEBUG oslo_vmware.rw_handles [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2898.782453] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd9c0a1-bf66-4584-b275-314a69f85995 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.789817] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1f5529-c5c2-44a9-a511-c9280dd3e644 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.819378] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98443e0b-3ef2-4033-b521-3d1e274cae42 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.826815] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad226db5-9980-405d-af3e-a88ebd3f10be {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2898.839781] env[61964]: DEBUG nova.compute.provider_tree [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2898.847956] env[61964]: DEBUG nova.scheduler.client.report [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2898.861417] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.295s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2898.862104] env[61964]: ERROR nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2898.862104] env[61964]: Faults: ['InvalidArgument'] [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Traceback (most recent call last): [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2898.862104] env[61964]: ERROR 
nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self.driver.spawn(context, instance, image_meta, [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self._fetch_image_if_missing(context, vi) [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] image_cache(vi, tmp_image_ds_loc) [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] vm_util.copy_virtual_disk( [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] session._wait_for_task(vmdk_copy_task) [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] return self.wait_for_task(task_ref) [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] return evt.wait() [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] result = hub.switch() [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] return self.greenlet.switch() [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] self.f(*self.args, **self.kw) [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] raise exceptions.translate_fault(task_info.error) [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Faults: ['InvalidArgument'] [ 2898.862104] env[61964]: ERROR nova.compute.manager [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] [ 2898.862763] env[61964]: DEBUG nova.compute.utils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2898.864958] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Build of instance 7d1977c2-cc88-4964-989a-9258f345c4f2 was re-scheduled: A specified parameter was not correct: fileType [ 2898.864958] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2898.865342] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2898.865508] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2898.865673] env[61964]: DEBUG nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2898.865831] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2899.175733] env[61964]: DEBUG nova.network.neutron [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2899.192168] env[61964]: INFO nova.compute.manager [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Took 0.33 seconds to deallocate network for instance. [ 2899.279747] env[61964]: INFO nova.scheduler.client.report [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Deleted allocations for instance 7d1977c2-cc88-4964-989a-9258f345c4f2 [ 2899.300449] env[61964]: DEBUG oslo_concurrency.lockutils [None req-2ec4105a-0126-4a2b-9075-15eda22b9ed9 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.914s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2899.301128] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 437.796s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2899.301128] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "7d1977c2-cc88-4964-989a-9258f345c4f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2899.301250] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2899.301460] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2899.303574] env[61964]: INFO nova.compute.manager [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Terminating instance [ 2899.305644] env[61964]: DEBUG nova.compute.manager [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2899.305833] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2899.306110] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ac51978-b104-46ee-8a62-81c753675298 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2899.316535] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d86a6e-d6ac-43da-803b-db2b4cdcb90e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2899.344493] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d1977c2-cc88-4964-989a-9258f345c4f2 could not be found. [ 2899.344696] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2899.344867] env[61964]: INFO nova.compute.manager [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2899.345126] env[61964]: DEBUG oslo.service.loopingcall [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2899.345357] env[61964]: DEBUG nova.compute.manager [-] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2899.345455] env[61964]: DEBUG nova.network.neutron [-] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2899.367878] env[61964]: DEBUG nova.network.neutron [-] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2899.375833] env[61964]: INFO nova.compute.manager [-] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] Took 0.03 seconds to deallocate network for instance. [ 2899.466414] env[61964]: DEBUG oslo_concurrency.lockutils [None req-8e5aeb9c-1778-4bec-bb7e-8e58d76bc776 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2899.467250] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 253.875s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2899.467439] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 7d1977c2-cc88-4964-989a-9258f345c4f2] During sync_power_state the instance has a pending task (deleting). Skip. 
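The lock traces above ('acquired by ... waited', '"released" by ... held') come from named locks such as "compute_resources" and the per-instance "<uuid>-events" lock, with the wait and hold times reported on acquire and release. A minimal standard-library sketch of that timing-and-reporting pattern around a named lock follows; it mirrors the pattern only and is not the oslo.concurrency lockutils implementation.

# Sketch of the "waited ... / held ..." accounting printed for named locks
# in the entries above. Names and reporting format are illustrative only.
import contextlib
import threading
import time
from collections import defaultdict

_locks = defaultdict(threading.Lock)


@contextlib.contextmanager
def named_lock(name, owner):
    lock = _locks[name]
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("compute_resources", "example.abort_instance_claim"):
        time.sleep(0.05)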
[ 2899.467617] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "7d1977c2-cc88-4964-989a-9258f345c4f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2900.383801] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2901.384395] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2901.384665] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2901.384703] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2901.402368] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.402520] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.402654] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.402786] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.402904] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.403039] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.403162] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.403282] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2901.403399] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2905.383383] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2905.383679] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2905.383774] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2906.384745] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2909.383532] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2910.384640] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2910.384640] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2910.393919] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] There are 0 instances to clean {{(pid=61964) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2911.388959] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2914.384235] env[61964]: DEBUG oslo_service.periodic_task [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2914.396312] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2914.396698] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2914.396925] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2914.397276] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2914.398215] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdfceb6-4120-4c2f-bbc2-20c56e0d66c7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.406062] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ee0ddf-163a-49b0-be7c-a668c62d3715 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.419588] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e04a23-3998-4ad7-8b11-0167dc3a18c7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.425916] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148a30b7-54e5-41b5-b635-7213739c3627 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.455332] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181346MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2914.455489] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2914.455692] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2914.544316] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.544494] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.544592] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.544716] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.544843] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.544952] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.545082] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.545200] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a50141eb-d189-4970-9adc-10a25409b99a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2914.545389] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2914.545531] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2914.560637] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2914.575336] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2914.575519] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2914.586527] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2914.605732] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2914.693344] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf8a85d-ceda-4a69-a07b-0f48eedc8c87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.701584] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3f1b7e27-da19-4a52-9a0d-1beb54ea285f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.729697] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6472d299-b804-4779-9013-0ece510bef11 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.736288] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7866848e-bfad-44f4-ad27-d01564c9a6a3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2914.749387] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2914.757772] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2914.771287] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2914.771461] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.316s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2915.771390] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2920.384088] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2920.384451] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Cleaning up deleted instances with incomplete migration {{(pid=61964) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2924.385035] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2944.708914] env[61964]: WARNING 
oslo_vmware.rw_handles [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2944.708914] env[61964]: ERROR oslo_vmware.rw_handles [ 2944.709632] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2944.711705] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2944.712012] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Copying Virtual Disk [datastore1] vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/44e01a37-4b2b-4116-96b7-08998c582f52/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2944.712360] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dafcd9c-5b1c-4d04-8876-6d43aca3b0a6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2944.719733] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Waiting for the task: (returnval){ [ 2944.719733] env[61964]: value = "task-1688788" [ 2944.719733] env[61964]: _type = 
"Task" [ 2944.719733] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2944.727528] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Task: {'id': task-1688788, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2945.229803] env[61964]: DEBUG oslo_vmware.exceptions [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2945.230119] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2945.231032] env[61964]: ERROR nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2945.231032] env[61964]: Faults: ['InvalidArgument'] [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Traceback (most recent call last): [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] yield resources [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self.driver.spawn(context, instance, image_meta, [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self._fetch_image_if_missing(context, vi) [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] image_cache(vi, tmp_image_ds_loc) [ 2945.231032] 
env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] vm_util.copy_virtual_disk( [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] session._wait_for_task(vmdk_copy_task) [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] return self.wait_for_task(task_ref) [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] return evt.wait() [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] result = hub.switch() [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] return self.greenlet.switch() [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self.f(*self.args, **self.kw) [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] raise exceptions.translate_fault(task_info.error) [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Faults: ['InvalidArgument'] [ 2945.231032] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] [ 2945.231032] env[61964]: INFO nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Terminating instance [ 2945.232452] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 
tempest-ServerActionsTestJSON-1740908949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2945.232648] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2945.232877] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7a00db0-951e-4dc1-aa0a-30e818acbf00 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.236528] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2945.236722] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2945.237454] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4e24c2-27b2-486e-be33-a5778939877b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.240749] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2945.240915] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2945.241832] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c5bc561-a8c6-425f-a708-4c3adb54ced6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.245710] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2945.246202] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d534283-a881-4a46-be2e-4f06c938e321 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.248335] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Waiting for the task: (returnval){ [ 2945.248335] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e8c622-3b97-9afb-36d9-3a3994b16ebc" [ 2945.248335] env[61964]: _type = "Task" [ 2945.248335] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2945.255824] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e8c622-3b97-9afb-36d9-3a3994b16ebc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2945.323818] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2945.324060] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2945.324239] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Deleting the datastore file [datastore1] f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2945.324508] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b467e85f-ca85-40f1-88f8-0fb3a205e53b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.330722] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Waiting for the task: (returnval){ [ 2945.330722] env[61964]: value = "task-1688790" [ 2945.330722] env[61964]: _type = "Task" [ 2945.330722] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2945.337965] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Task: {'id': task-1688790, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2945.758536] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2945.758904] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Creating directory with path [datastore1] vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2945.759068] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fc70525-a83f-45e5-995e-fc22d5cbb892 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.770736] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Created directory with path [datastore1] vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2945.770918] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Fetch image to [datastore1] vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2945.771106] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2945.771798] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29a4535-9ed4-4a49-8eab-41da92d4a092 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.779297] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d6c5cc-2dd0-4d19-8949-17449dbefb30 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.787938] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27590df1-2dda-4121-a9ed-89d232c56bb7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.817227] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786a03e5-508c-49a7-840e-9c4a774104f5 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.822299] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6bd34d5e-f79c-4d03-a984-db76c180eaba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2945.837927] env[61964]: DEBUG oslo_vmware.api [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Task: {'id': task-1688790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092521} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2945.838158] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2945.838334] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2945.838501] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2945.838675] env[61964]: INFO nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Took 0.60 seconds to destroy the instance on the hypervisor. 
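The DeleteDatastoreFile_Task entries above show the wait_for_task pattern: the session keeps polling the vCenter task until it reports success (here after 0.092521s) or an error, in which case the fault is translated and raised, as in the CopyVirtualDisk_Task traceback earlier. A minimal, generic sketch of that poll loop under stated assumptions; get_task_info() is a hypothetical stand-in for the PropertyCollector read of the task's info property and is not oslo.vmware's actual API.

# Generic poll-until-done loop mirroring the wait_for_task behaviour above.
# get_task_info() is a hypothetical helper, not an oslo.vmware call.
import time


class TaskFailed(Exception):
    """Raised when the vCenter task reports an error (cf. VimFaultException)."""


def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    while True:
        info = get_task_info(task_ref)           # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info                          # log: "completed successfully"
        if info['state'] in ('error', 'cancelled'):
            raise TaskFailed(info.get('error'))  # log: translated fault is raised
        time.sleep(poll_interval)                # log: "progress is 0%." between polls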
[ 2945.841627] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2945.843692] env[61964]: DEBUG nova.compute.claims [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2945.843857] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2945.844082] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2945.892484] env[61964]: DEBUG oslo_vmware.rw_handles [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2945.953329] env[61964]: DEBUG oslo_vmware.rw_handles [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2945.953576] env[61964]: DEBUG oslo_vmware.rw_handles [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2946.046801] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3738c4ca-d70e-4474-89fd-691bf08a62c7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.055132] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7f7575-ae7c-47f3-bb12-aeee77f93c35 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.086014] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d57560-4987-4adf-9ef5-41f71b3c2e47 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.093700] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09222710-2e7c-4d11-9a58-3aec235ea055 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.106689] env[61964]: DEBUG nova.compute.provider_tree [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2946.115176] env[61964]: DEBUG nova.scheduler.client.report [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2946.129389] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.285s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2946.129931] env[61964]: ERROR nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2946.129931] env[61964]: Faults: ['InvalidArgument'] [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Traceback (most recent call last): [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2946.129931] 
env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self.driver.spawn(context, instance, image_meta, [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self._fetch_image_if_missing(context, vi) [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] image_cache(vi, tmp_image_ds_loc) [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] vm_util.copy_virtual_disk( [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] session._wait_for_task(vmdk_copy_task) [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] return self.wait_for_task(task_ref) [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] return evt.wait() [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] result = hub.switch() [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] return self.greenlet.switch() [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] self.f(*self.args, **self.kw) [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] raise exceptions.translate_fault(task_info.error) [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Faults: ['InvalidArgument'] [ 2946.129931] env[61964]: ERROR nova.compute.manager [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] [ 2946.130763] env[61964]: DEBUG nova.compute.utils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2946.132094] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Build of instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e was re-scheduled: A specified parameter was not correct: fileType [ 2946.132094] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2946.132461] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2946.132633] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2946.132786] env[61964]: DEBUG nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2946.132947] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2946.474867] env[61964]: DEBUG nova.network.neutron [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2946.495180] env[61964]: INFO nova.compute.manager [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Took 0.36 seconds to deallocate network for instance. [ 2946.598349] env[61964]: INFO nova.scheduler.client.report [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Deleted allocations for instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e [ 2946.630112] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d438cdc7-37d5-45bf-9c7f-b65fa053377b tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 574.746s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2946.630112] env[61964]: DEBUG oslo_concurrency.lockutils [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 378.883s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2946.630239] env[61964]: DEBUG oslo_concurrency.lockutils [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Acquiring lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2946.630360] env[61964]: DEBUG oslo_concurrency.lockutils [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2946.630526] env[61964]: DEBUG oslo_concurrency.lockutils [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2946.632605] env[61964]: INFO nova.compute.manager [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Terminating instance [ 2946.634644] env[61964]: DEBUG nova.compute.manager [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2946.634913] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2946.635396] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6df6b62-85e9-4cd0-a4a9-c40a4140b487 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.649933] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e08d911-15d8-440e-8dc3-b7a32976dbed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2946.675942] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e could not be found. [ 2946.676158] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2946.676333] env[61964]: INFO nova.compute.manager [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2946.676568] env[61964]: DEBUG oslo.service.loopingcall [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2946.677037] env[61964]: DEBUG nova.compute.manager [-] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2946.677145] env[61964]: DEBUG nova.network.neutron [-] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2946.699529] env[61964]: DEBUG nova.network.neutron [-] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2946.707569] env[61964]: INFO nova.compute.manager [-] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] Took 0.03 seconds to deallocate network for instance. [ 2946.798399] env[61964]: DEBUG oslo_concurrency.lockutils [None req-759cbb1c-b343-49db-ae50-6cd4bda8502f tempest-ServerExternalEventsTest-1647517953 tempest-ServerExternalEventsTest-1647517953-project-member] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2946.799241] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 301.206s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2946.799421] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2946.799591] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "f24e7f1d-4c72-40b8-aa6e-aac0a7274a3e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2961.390277] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2961.390635] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2961.390635] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2961.408813] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2961.408988] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2961.409146] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2961.409276] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2961.409400] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2961.409524] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2961.409640] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2961.409770] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2962.383753] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2965.384609] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2965.644275] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "3352530a-f799-4a76-9e18-86ab1bd96d2e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2967.384634] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2967.385044] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2967.385044] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2971.384039] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2972.379729] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2975.384622] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2975.396036] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2975.396274] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2975.396474] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2975.396637] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2975.397762] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a709843-49da-4d07-8944-ef44d75cf9ed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.406676] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46d243a-48ed-461a-9531-a122c9bca8dd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.420562] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deec8673-e7a1-432f-9b74-a4e7f95d3877 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.426652] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b863a94-0894-45b1-9feb-504932f51344 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.456642] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181345MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2975.456792] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2975.456980] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2975.518797] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 04451950-2e85-46dd-a516-6b7743e03f7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2975.518956] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2975.519121] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2975.519260] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2975.519437] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2975.519727] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2975.519855] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a50141eb-d189-4970-9adc-10a25409b99a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2975.520067] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2975.520220] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2975.613660] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de373b64-da23-4305-ac51-41d469bfc9da {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.621239] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64b817d-11d7-47d3-bb7e-6b417add74c3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.653075] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd7ea4f-f3df-4051-aa5b-a1f52cec1d07 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.659891] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35112f2c-b17d-414d-b5bf-25b58bf70e93 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.673654] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2975.681734] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2975.695522] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2975.695727] 
env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.239s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2977.696116] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2988.381079] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2992.974184] env[61964]: WARNING oslo_vmware.rw_handles [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2992.974184] env[61964]: ERROR oslo_vmware.rw_handles [ 2992.974905] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2992.977084] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2992.977361] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Copying 
Virtual Disk [datastore1] vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/e89936f7-a04a-40f2-a0f0-a18d474462bd/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2992.977650] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-707d0752-663a-42c3-8a74-8961705b3271 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2992.985553] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Waiting for the task: (returnval){ [ 2992.985553] env[61964]: value = "task-1688791" [ 2992.985553] env[61964]: _type = "Task" [ 2992.985553] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2992.993502] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Task: {'id': task-1688791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2993.496296] env[61964]: DEBUG oslo_vmware.exceptions [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2993.496580] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2993.497140] env[61964]: ERROR nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2993.497140] env[61964]: Faults: ['InvalidArgument'] [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Traceback (most recent call last): [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] yield resources [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self.driver.spawn(context, instance, image_meta, [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 
04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self._fetch_image_if_missing(context, vi) [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] image_cache(vi, tmp_image_ds_loc) [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] vm_util.copy_virtual_disk( [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] session._wait_for_task(vmdk_copy_task) [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] return self.wait_for_task(task_ref) [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] return evt.wait() [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] result = hub.switch() [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] return self.greenlet.switch() [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self.f(*self.args, **self.kw) [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] raise 
exceptions.translate_fault(task_info.error) [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Faults: ['InvalidArgument'] [ 2993.497140] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] [ 2993.498057] env[61964]: INFO nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Terminating instance [ 2993.498939] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2993.499153] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2993.499416] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-819f1c17-0fd6-4af3-bf26-8fd276fd1fd3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2993.501501] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2993.501691] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2993.502408] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23152bda-c659-4608-b539-19b396a89c9a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2993.509245] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2993.509458] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76c94055-af3a-4a7c-bd27-46f2a6e3ff6c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2993.511517] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2993.511689] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2993.512608] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-301f5ddb-b467-406c-868b-06de4b183f67 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2993.516924] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 2993.516924] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f61997-8b4e-95b6-27ce-67f2e0ab50e4" [ 2993.516924] env[61964]: _type = "Task" [ 2993.516924] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2993.523759] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f61997-8b4e-95b6-27ce-67f2e0ab50e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2993.583195] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2993.583461] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2993.583714] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Deleting the datastore file [datastore1] 04451950-2e85-46dd-a516-6b7743e03f7d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2993.583914] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2da0bbf0-c174-4e0e-8940-d774f244a738 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2993.590102] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Waiting for the task: (returnval){ [ 2993.590102] env[61964]: value = "task-1688793" [ 2993.590102] env[61964]: _type = "Task" [ 2993.590102] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2993.597644] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Task: {'id': task-1688793, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2994.026615] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2994.027030] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating directory with path [datastore1] vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2994.027154] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-076c3a73-2662-4c03-89af-dfc567d69716 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.037891] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Created directory with path [datastore1] vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2994.038084] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Fetch image to [datastore1] vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2994.038256] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2994.038948] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515f8377-18c1-4fac-bdf0-6ed117f3c946 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.045185] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f55d292-efb9-4c05-ac61-99cea5cf5a9d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.053757] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbca74d5-cfce-4fb5-8e61-e34eaff1b1df {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.084306] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4d9aac9c-6cab-425d-bc68-68e4c0ad1acb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.089521] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-961fadbb-43c3-4f13-a200-c1764d0ea40b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.097931] env[61964]: DEBUG oslo_vmware.api [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Task: {'id': task-1688793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076928} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2994.098174] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2994.098352] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2994.098523] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2994.098694] env[61964]: INFO nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2994.100793] env[61964]: DEBUG nova.compute.claims [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2994.100963] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2994.101190] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2994.112387] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2994.163221] env[61964]: DEBUG oslo_vmware.rw_handles [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2994.224332] env[61964]: DEBUG oslo_vmware.rw_handles [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2994.224522] env[61964]: DEBUG oslo_vmware.rw_handles [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2994.286658] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea9ae40-30ae-4c65-a413-69c8f16c49dd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.293868] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c56bd6-9574-46d7-b7e5-b6fd2cbcdd26 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.323256] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6933bfad-d5b7-4b18-97f5-7ae8cf2fcb83 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.329991] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b688c5-56cd-40c0-a390-54f26227ff97 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.343477] env[61964]: DEBUG nova.compute.provider_tree [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2994.351961] env[61964]: DEBUG nova.scheduler.client.report [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2994.366179] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.265s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2994.366714] env[61964]: ERROR nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2994.366714] env[61964]: Faults: ['InvalidArgument'] [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Traceback (most recent call last): [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2994.366714] env[61964]: ERROR 
nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self.driver.spawn(context, instance, image_meta, [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self._fetch_image_if_missing(context, vi) [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] image_cache(vi, tmp_image_ds_loc) [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] vm_util.copy_virtual_disk( [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] session._wait_for_task(vmdk_copy_task) [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] return self.wait_for_task(task_ref) [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] return evt.wait() [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] result = hub.switch() [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] return self.greenlet.switch() [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] self.f(*self.args, **self.kw) [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] raise exceptions.translate_fault(task_info.error) [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Faults: ['InvalidArgument'] [ 2994.366714] env[61964]: ERROR nova.compute.manager [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] [ 2994.367558] env[61964]: DEBUG nova.compute.utils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2994.368846] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Build of instance 04451950-2e85-46dd-a516-6b7743e03f7d was re-scheduled: A specified parameter was not correct: fileType [ 2994.368846] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2994.369257] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2994.369443] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2994.369618] env[61964]: DEBUG nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2994.369782] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2994.752360] env[61964]: DEBUG nova.network.neutron [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2994.764027] env[61964]: INFO nova.compute.manager [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Took 0.39 seconds to deallocate network for instance. [ 2994.860345] env[61964]: INFO nova.scheduler.client.report [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Deleted allocations for instance 04451950-2e85-46dd-a516-6b7743e03f7d [ 2994.881060] env[61964]: DEBUG oslo_concurrency.lockutils [None req-407f8914-da48-457e-8ef7-08d2eee3815e tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "04451950-2e85-46dd-a516-6b7743e03f7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 534.291s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2994.881331] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "04451950-2e85-46dd-a516-6b7743e03f7d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 349.288s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2994.881517] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2994.881729] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "04451950-2e85-46dd-a516-6b7743e03f7d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2994.881969] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "04451950-2e85-46dd-a516-6b7743e03f7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 338.614s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2994.882198] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Acquiring lock "04451950-2e85-46dd-a516-6b7743e03f7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2994.882394] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "04451950-2e85-46dd-a516-6b7743e03f7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2994.882560] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "04451950-2e85-46dd-a516-6b7743e03f7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2994.884395] env[61964]: INFO nova.compute.manager [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Terminating instance [ 2994.886057] env[61964]: DEBUG nova.compute.manager [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2994.886255] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2994.886762] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16649eaf-c68b-4235-9853-89479e9a3773 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.895643] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38284ff1-77fc-4e9b-be83-2f3407b6774e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.921225] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04451950-2e85-46dd-a516-6b7743e03f7d could not be found. [ 2994.921435] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2994.921635] env[61964]: INFO nova.compute.manager [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2994.921887] env[61964]: DEBUG oslo.service.loopingcall [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2994.922345] env[61964]: DEBUG nova.compute.manager [-] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2994.922449] env[61964]: DEBUG nova.network.neutron [-] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2994.947466] env[61964]: DEBUG nova.network.neutron [-] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2994.955041] env[61964]: INFO nova.compute.manager [-] [instance: 04451950-2e85-46dd-a516-6b7743e03f7d] Took 0.03 seconds to deallocate network for instance. 
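The terminate path above is serialized with oslo.concurrency locks: the per-instance lock ("04451950-...") and the companion "<uuid>-events" lock are taken through lockutils.synchronized, which is what emits the 'acquired by ... waited' and 'released by ... held' DEBUG lines. Below is a minimal sketch of that locking idiom only, with the instance UUID taken from the log and a placeholder body; it is not the compute manager's real logic.

from oslo_concurrency import lockutils


def terminate_instance(instance_uuid):
    # Decorating an inner function at call time yields a lock named after the
    # instance UUID; lockutils logs the "acquired"/"released" DEBUG messages,
    # including how long the caller waited and how long the lock was held.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        # Placeholder for the real work shown in the surrounding entries
        # (destroy on the hypervisor, deallocate network, clear events).
        pass

    do_terminate_instance()


terminate_instance('04451950-2e85-46dd-a516-6b7743e03f7d')

The separate "<uuid>-events" lock seen above serializes access to the instance's pending external events in the same way, via InstanceEvents.clear_events_for_instance.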
[ 2995.038969] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1613c38a-7ed6-41aa-9b33-7192ee1a3693 tempest-ServerActionsTestJSON-1740908949 tempest-ServerActionsTestJSON-1740908949-project-member] Lock "04451950-2e85-46dd-a516-6b7743e03f7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.157s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3021.384092] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3021.384385] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 3021.384430] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 3021.401154] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3021.401282] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3021.401415] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3021.401545] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3021.401670] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3021.401794] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3021.401937] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 3022.383815] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3027.384643] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3028.383818] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3028.384147] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3028.384317] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 3032.385022] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3034.380795] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3036.384610] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3036.396811] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3036.397022] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3036.397191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3036.397343] env[61964]: DEBUG nova.compute.resource_tracker [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 3036.398476] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25115de1-620e-4b6d-9cc7-5f49da0634b9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.407055] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8471fbcb-b370-44e5-a3e1-09672d97a45d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.420477] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e049240b-718e-4bdb-a311-b278c7181cab {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.426541] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd0e396-2e24-4e89-a0bb-b58227a9f0f0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.454433] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181370MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 3036.454572] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3036.454758] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3036.511767] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3036.511960] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3036.512049] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3036.512169] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3036.512286] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3036.512403] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a50141eb-d189-4970-9adc-10a25409b99a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3036.512577] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 3036.512711] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 3036.587163] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b4f8c7-f101-451b-9dfb-359e5118424b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.594696] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca332a83-b000-41c7-950f-ac449e98e093 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.624274] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128b50f9-f365-4b9e-9fe8-9fde16689681 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.631331] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ad61f9-761c-4db6-b7e8-f74ce555aa1c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.644270] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3036.652116] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider 
c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3036.665848] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 3036.666054] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.211s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3038.115961] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "23ae2618-093b-49a7-b3e4-3d8038e08cc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3038.116233] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "23ae2618-093b-49a7-b3e4-3d8038e08cc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3038.127853] env[61964]: DEBUG nova.compute.manager [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 3038.183209] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3038.184036] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3038.185131] env[61964]: INFO nova.compute.claims [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 3038.308818] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8897179c-bc79-4b9d-b550-1eb837b3700f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.315825] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c82fd1-7b52-4014-a419-94ad88144786 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.345836] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84184413-c844-4989-a8c4-7416fb1ea557 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.352450] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e847f0e0-513b-434a-a4a4-53bbdc3232c2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.366201] env[61964]: DEBUG nova.compute.provider_tree [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3038.374856] env[61964]: DEBUG nova.scheduler.client.report [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3038.387970] env[61964]: DEBUG 
oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.204s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3038.388447] env[61964]: DEBUG nova.compute.manager [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 3038.420847] env[61964]: DEBUG nova.compute.utils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 3038.422086] env[61964]: DEBUG nova.compute.manager [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 3038.422263] env[61964]: DEBUG nova.network.neutron [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 3038.430711] env[61964]: DEBUG nova.compute.manager [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 3038.492309] env[61964]: DEBUG nova.compute.manager [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 3038.497720] env[61964]: DEBUG nova.policy [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c586df6784a4660ab9602e225cceece', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91a3f71b0c37437a98325ca4a2f48d46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 3038.512643] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 3038.512876] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 3038.513047] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 3038.513234] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 3038.513380] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 3038.513525] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 3038.513724] 
env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 3038.513880] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 3038.514055] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 3038.514220] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 3038.514388] env[61964]: DEBUG nova.virt.hardware [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 3038.515443] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b150d4d1-fa45-445d-9217-8533b4e9d297 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.522832] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d327825f-a777-498b-9efc-8f6750348fb2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.665307] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3038.799872] env[61964]: DEBUG nova.network.neutron [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Successfully created port: bcb408ac-f316-40b9-b57f-23ac393bec06 {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 3039.514018] env[61964]: DEBUG nova.compute.manager [req-03a82f22-1edc-4f9c-b7c7-b28c3f38fcc4 req-2748cd84-ad3c-46b5-9cb2-dbd712aa3c74 service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Received event network-vif-plugged-bcb408ac-f316-40b9-b57f-23ac393bec06 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 3039.514255] env[61964]: DEBUG oslo_concurrency.lockutils [req-03a82f22-1edc-4f9c-b7c7-b28c3f38fcc4 req-2748cd84-ad3c-46b5-9cb2-dbd712aa3c74 service nova] Acquiring lock 
"23ae2618-093b-49a7-b3e4-3d8038e08cc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3039.514439] env[61964]: DEBUG oslo_concurrency.lockutils [req-03a82f22-1edc-4f9c-b7c7-b28c3f38fcc4 req-2748cd84-ad3c-46b5-9cb2-dbd712aa3c74 service nova] Lock "23ae2618-093b-49a7-b3e4-3d8038e08cc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3039.514625] env[61964]: DEBUG oslo_concurrency.lockutils [req-03a82f22-1edc-4f9c-b7c7-b28c3f38fcc4 req-2748cd84-ad3c-46b5-9cb2-dbd712aa3c74 service nova] Lock "23ae2618-093b-49a7-b3e4-3d8038e08cc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3039.514774] env[61964]: DEBUG nova.compute.manager [req-03a82f22-1edc-4f9c-b7c7-b28c3f38fcc4 req-2748cd84-ad3c-46b5-9cb2-dbd712aa3c74 service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] No waiting events found dispatching network-vif-plugged-bcb408ac-f316-40b9-b57f-23ac393bec06 {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 3039.514933] env[61964]: WARNING nova.compute.manager [req-03a82f22-1edc-4f9c-b7c7-b28c3f38fcc4 req-2748cd84-ad3c-46b5-9cb2-dbd712aa3c74 service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Received unexpected event network-vif-plugged-bcb408ac-f316-40b9-b57f-23ac393bec06 for instance with vm_state building and task_state spawning. [ 3039.632485] env[61964]: DEBUG nova.network.neutron [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Successfully updated port: bcb408ac-f316-40b9-b57f-23ac393bec06 {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 3039.648458] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "refresh_cache-23ae2618-093b-49a7-b3e4-3d8038e08cc1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3039.648608] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "refresh_cache-23ae2618-093b-49a7-b3e4-3d8038e08cc1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3039.648756] env[61964]: DEBUG nova.network.neutron [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 3039.684281] env[61964]: DEBUG nova.network.neutron [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 
23ae2618-093b-49a7-b3e4-3d8038e08cc1] Instance cache missing network info. {{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 3039.835753] env[61964]: DEBUG nova.network.neutron [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Updating instance_info_cache with network_info: [{"id": "bcb408ac-f316-40b9-b57f-23ac393bec06", "address": "fa:16:3e:cd:ea:04", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcb408ac-f3", "ovs_interfaceid": "bcb408ac-f316-40b9-b57f-23ac393bec06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3039.846246] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "refresh_cache-23ae2618-093b-49a7-b3e4-3d8038e08cc1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3039.846508] env[61964]: DEBUG nova.compute.manager [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Instance network_info: |[{"id": "bcb408ac-f316-40b9-b57f-23ac393bec06", "address": "fa:16:3e:cd:ea:04", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcb408ac-f3", "ovs_interfaceid": "bcb408ac-f316-40b9-b57f-23ac393bec06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 3039.846883] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:ea:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19440099-773e-4a31-b82e-84a4daa5d8fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcb408ac-f316-40b9-b57f-23ac393bec06', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 3039.854446] env[61964]: DEBUG oslo.service.loopingcall [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3039.854868] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 3039.855109] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ca1b1ff-cb2b-42d4-afd7-40302039b2e9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3039.875224] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 3039.875224] env[61964]: value = "task-1688794" [ 3039.875224] env[61964]: _type = "Task" [ 3039.875224] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3039.882915] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688794, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3040.385216] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688794, 'name': CreateVM_Task, 'duration_secs': 0.300939} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3040.385398] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 3040.386058] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3040.386231] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3040.386547] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3040.386791] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69180898-1659-4113-affe-2b6491a3979d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3040.390982] env[61964]: DEBUG oslo_vmware.api [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 3040.390982] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5227cbe1-0a28-b460-658d-dc45c601877a" [ 3040.390982] env[61964]: _type = "Task" [ 3040.390982] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3040.398027] env[61964]: DEBUG oslo_vmware.api [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5227cbe1-0a28-b460-658d-dc45c601877a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3040.901890] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3040.902172] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 3040.902370] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3041.543018] env[61964]: DEBUG nova.compute.manager [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Received event network-changed-bcb408ac-f316-40b9-b57f-23ac393bec06 {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 3041.543135] env[61964]: DEBUG nova.compute.manager [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Refreshing instance network info cache due to event network-changed-bcb408ac-f316-40b9-b57f-23ac393bec06. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 3041.543350] env[61964]: DEBUG oslo_concurrency.lockutils [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] Acquiring lock "refresh_cache-23ae2618-093b-49a7-b3e4-3d8038e08cc1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3041.543491] env[61964]: DEBUG oslo_concurrency.lockutils [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] Acquired lock "refresh_cache-23ae2618-093b-49a7-b3e4-3d8038e08cc1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3041.543649] env[61964]: DEBUG nova.network.neutron [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Refreshing network info cache for port bcb408ac-f316-40b9-b57f-23ac393bec06 {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 3041.772446] env[61964]: DEBUG nova.network.neutron [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Updated VIF entry in instance network info cache for port bcb408ac-f316-40b9-b57f-23ac393bec06. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 3041.772796] env[61964]: DEBUG nova.network.neutron [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Updating instance_info_cache with network_info: [{"id": "bcb408ac-f316-40b9-b57f-23ac393bec06", "address": "fa:16:3e:cd:ea:04", "network": {"id": "c2949610-1be7-4d4a-972f-5d3a0fdcd4e6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2033976262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91a3f71b0c37437a98325ca4a2f48d46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcb408ac-f3", "ovs_interfaceid": "bcb408ac-f316-40b9-b57f-23ac393bec06", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3041.782395] env[61964]: DEBUG oslo_concurrency.lockutils [req-048cc3d2-5710-4c2d-bd1d-a4c298592546 req-7113eb94-17d6-4088-bf19-543c7adc37bf service nova] Releasing lock "refresh_cache-23ae2618-093b-49a7-b3e4-3d8038e08cc1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3043.112375] env[61964]: WARNING oslo_vmware.rw_handles [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3043.112375] env[61964]: ERROR oslo_vmware.rw_handles [ 3043.113136] env[61964]: DEBUG nova.virt.vmwareapi.images [None 
req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3043.115133] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3043.115393] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Copying Virtual Disk [datastore1] vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/066be1e7-9d34-4d1b-bc28-a3e6d2613361/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3043.115705] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cce1a1d-17e3-4380-a86e-5c9eada55669 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.122945] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 3043.122945] env[61964]: value = "task-1688795" [ 3043.122945] env[61964]: _type = "Task" [ 3043.122945] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3043.131255] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': task-1688795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3043.632768] env[61964]: DEBUG oslo_vmware.exceptions [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3043.633012] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3043.633545] env[61964]: ERROR nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3043.633545] env[61964]: Faults: ['InvalidArgument'] [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Traceback (most recent call last): [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] yield resources [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self.driver.spawn(context, instance, image_meta, [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self._fetch_image_if_missing(context, vi) [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] image_cache(vi, tmp_image_ds_loc) [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] vm_util.copy_virtual_disk( [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] session._wait_for_task(vmdk_copy_task) [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] return self.wait_for_task(task_ref) [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] return evt.wait() [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] result = hub.switch() [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] return self.greenlet.switch() [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self.f(*self.args, **self.kw) [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] raise exceptions.translate_fault(task_info.error) [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Faults: ['InvalidArgument'] [ 3043.633545] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] [ 3043.634396] env[61964]: INFO nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Terminating instance [ 3043.635353] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3043.635555] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3043.635784] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ed7becbe-e919-43fb-9ee0-631517622537 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.637924] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3043.638129] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3043.638826] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0592ea64-f5de-4a58-b947-bf0ccc17543d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.645692] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3043.645903] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-257fd04b-de59-43bc-a096-a19186245543 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.659961] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3043.659961] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3043.660163] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f7d2044-5c15-413b-b551-e6578ec14864 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.665212] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 3043.665212] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52afc28d-43d8-ca72-ca35-2edd625c187b" [ 3043.665212] env[61964]: _type = "Task" [ 3043.665212] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3043.674751] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52afc28d-43d8-ca72-ca35-2edd625c187b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3044.175078] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3044.175371] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating directory with path [datastore1] vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3044.175584] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17e6b8ac-4213-418a-985a-e7c294fa5721 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.195230] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created directory with path [datastore1] vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3044.195466] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Fetch image to [datastore1] vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3044.195735] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3044.196668] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03be7dc8-459c-4695-8b86-0a5523f35cf4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.203429] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f663d59e-5514-43a3-a91e-a930c35a4e20 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.212081] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e01385-f14e-4097-b303-756ffd759bac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.241308] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2601abfa-2dff-4cca-8097-32d0bf8ac0f7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.246769] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-16833038-ca8c-4410-8c37-d6c5dc435cf0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.270349] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3044.317784] env[61964]: DEBUG oslo_vmware.rw_handles [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3044.376619] env[61964]: DEBUG oslo_vmware.rw_handles [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3044.376814] env[61964]: DEBUG oslo_vmware.rw_handles [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3044.985810] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3044.986057] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3044.986198] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Deleting the datastore file [datastore1] 841fd145-2c83-46a5-be0e-d0c6de409f67 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3044.986475] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e89f0e3-99ad-49b0-8e49-62b768cb8291 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.992982] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 3044.992982] env[61964]: value = "task-1688797" [ 3044.992982] env[61964]: _type = "Task" [ 3044.992982] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3045.000560] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': task-1688797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3045.503164] env[61964]: DEBUG oslo_vmware.api [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': task-1688797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091051} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3045.503510] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3045.503619] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3045.503786] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3045.503960] env[61964]: INFO nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Took 1.87 seconds to destroy the instance on the hypervisor. [ 3045.506748] env[61964]: DEBUG nova.compute.claims [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3045.506921] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3045.507148] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3045.648782] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d97a14d-1d6d-4e33-ad7e-9c0709d5919d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.656366] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7136a9-c46f-4ceb-8181-8accd8ea830f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.687204] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016276f9-92af-479b-810d-74eb4d0e5308 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.694326] env[61964]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d15ba6-47ad-4398-9b5c-173aa5f64166 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.706925] env[61964]: DEBUG nova.compute.provider_tree [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3045.714964] env[61964]: DEBUG nova.scheduler.client.report [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3045.728506] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.221s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3045.729021] env[61964]: ERROR nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3045.729021] env[61964]: Faults: ['InvalidArgument'] [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Traceback (most recent call last): [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self.driver.spawn(context, instance, image_meta, [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self._fetch_image_if_missing(context, vi) [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] image_cache(vi, tmp_image_ds_loc) [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] vm_util.copy_virtual_disk( [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] session._wait_for_task(vmdk_copy_task) [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] return self.wait_for_task(task_ref) [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] return evt.wait() [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] result = hub.switch() [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] return self.greenlet.switch() [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] self.f(*self.args, **self.kw) [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] raise exceptions.translate_fault(task_info.error) [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Faults: ['InvalidArgument'] [ 3045.729021] env[61964]: ERROR nova.compute.manager [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] [ 3045.729853] env[61964]: DEBUG nova.compute.utils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 
841fd145-2c83-46a5-be0e-d0c6de409f67] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3045.731106] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Build of instance 841fd145-2c83-46a5-be0e-d0c6de409f67 was re-scheduled: A specified parameter was not correct: fileType [ 3045.731106] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 3045.731475] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 3045.731638] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 3045.731807] env[61964]: DEBUG nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3045.731967] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3046.082828] env[61964]: DEBUG nova.network.neutron [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3046.093973] env[61964]: INFO nova.compute.manager [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Took 0.36 seconds to deallocate network for instance. 
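The repeated traceback above shows the path this fault takes: `_cache_sparse_image` calls `vm_util.copy_virtual_disk`, which starts a `CopyVirtualDisk_Task` and blocks in `wait_for_task`; when vCenter ends the task with `InvalidArgument` ("A specified parameter was not correct: fileType"), `oslo_vmware.api._poll_task` translates the task error into a `VimFaultException`, the resource claim is aborted and the build is re-scheduled. The following is a minimal sketch of that call-and-fault path only, assuming an already established `oslo_vmware.api.VMwareAPISession`; the helper name, `dc_ref` and the datastore paths are illustrative placeholders, not taken from Nova or from this log.

```python
# Sketch only -- not Nova's implementation. Mirrors the call/fault path in the
# traceback above. `session` is assumed to be an established
# oslo_vmware.api.VMwareAPISession; dc_ref and the datastore paths are
# placeholders supplied by the caller.
from oslo_vmware import exceptions as vexc


def copy_sparse_image(session, dc_ref, src_path, dst_path):
    """Start a CopyVirtualDisk_Task and wait for it to finish."""
    vim = session.vim
    copy_task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName=src_path,      # e.g. '[datastore1] vmware_temp/.../tmp-sparse.vmdk'
        sourceDatacenter=dc_ref,
        destName=dst_path)        # e.g. '[datastore1] vmware_temp/.../<image-id>.vmdk'
    try:
        # wait_for_task() polls the task (the "progress is 0%" records above)
        # and, when the task ends in error, raises the fault translated by
        # oslo_vmware's exceptions.translate_fault().
        return session.wait_for_task(copy_task)
    except vexc.VimFaultException as err:
        # For the failure logged here, err.fault_list contains 'InvalidArgument'
        # and str(err) carries "A specified parameter was not correct: fileType".
        raise
```

Because the exception escapes `spawn()`, the compute manager treats it as a build failure rather than an infrastructure error: it aborts the `compute_resources` claim, deallocates networking for the instance (the "Deallocating network for instance" records above), and hands the request back to the scheduler for a retry.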
[ 3046.188141] env[61964]: INFO nova.scheduler.client.report [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Deleted allocations for instance 841fd145-2c83-46a5-be0e-d0c6de409f67 [ 3046.211663] env[61964]: DEBUG oslo_concurrency.lockutils [None req-752ca0a9-f8aa-49e1-9ce3-d550a58478ce tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 555.676s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3046.211969] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 400.618s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3046.212204] env[61964]: INFO nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] During sync_power_state the instance has a pending task (spawning). Skip. [ 3046.212420] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3046.213042] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 359.323s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3046.213311] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "841fd145-2c83-46a5-be0e-d0c6de409f67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3046.213554] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3046.213749] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3046.215775] env[61964]: INFO nova.compute.manager [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Terminating instance [ 3046.217551] env[61964]: DEBUG nova.compute.manager [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3046.217826] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3046.218128] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fb676ef-5f7b-43e1-af83-c85dff185971 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.228096] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d4770b-dfde-4c28-8ba4-04b561f2e90a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.256413] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 841fd145-2c83-46a5-be0e-d0c6de409f67 could not be found. [ 3046.256654] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3046.256859] env[61964]: INFO nova.compute.manager [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3046.257153] env[61964]: DEBUG oslo.service.loopingcall [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3046.257394] env[61964]: DEBUG nova.compute.manager [-] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3046.257495] env[61964]: DEBUG nova.network.neutron [-] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3046.280633] env[61964]: DEBUG nova.network.neutron [-] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3046.288141] env[61964]: INFO nova.compute.manager [-] [instance: 841fd145-2c83-46a5-be0e-d0c6de409f67] Took 0.03 seconds to deallocate network for instance. [ 3046.373058] env[61964]: DEBUG oslo_concurrency.lockutils [None req-f573d0bd-0dfc-4129-a544-c88883a636c5 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "841fd145-2c83-46a5-be0e-d0c6de409f67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3054.831874] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "bfe39bad-a73a-49a7-b79b-98ba8babbbcb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3054.831874] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Lock "bfe39bad-a73a-49a7-b79b-98ba8babbbcb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3054.845841] env[61964]: DEBUG nova.compute.manager [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Starting instance... 
{{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 3054.905806] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3054.906119] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3054.907652] env[61964]: INFO nova.compute.claims [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 3055.071756] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d9726e-08a7-4061-aafd-f9984845db3b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.082274] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d568da-22c8-4b2f-b2f1-995cfecba0c3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.117218] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a409a3ea-3f91-4e35-bf6f-ee371ca036d2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.125300] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b030a34-e607-4167-9811-b186dada4221 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.133866] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "b4938cfb-bedf-4ecb-83fa-d172d6689414" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3055.134325] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Lock "b4938cfb-bedf-4ecb-83fa-d172d6689414" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3055.143777] env[61964]: DEBUG nova.compute.provider_tree [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 
{{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3055.152271] env[61964]: DEBUG nova.scheduler.client.report [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3055.161121] env[61964]: DEBUG nova.compute.manager [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 3055.168326] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.262s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3055.168802] env[61964]: DEBUG nova.compute.manager [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 3055.206991] env[61964]: DEBUG nova.compute.utils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 3055.208520] env[61964]: DEBUG nova.compute.manager [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Not allocating networking since 'none' was specified. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 3055.221249] env[61964]: DEBUG nova.compute.manager [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Start building block device mappings for instance. 
{{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 3055.224466] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3055.224690] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3055.226084] env[61964]: INFO nova.compute.claims [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 3055.292363] env[61964]: DEBUG nova.compute.manager [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 3055.364961] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 3055.365196] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 3055.365348] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 3055.365524] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 3055.365666] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 3055.365810] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 3055.366025] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 3055.366181] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 3055.366346] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 3055.366504] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 3055.366673] env[61964]: DEBUG nova.virt.hardware [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 3055.367871] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724e509c-b65f-4e76-887e-4eebceee0eec {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.378154] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edad16c2-77a5-429a-8165-878a2cac7c33 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.393899] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Instance VIF info [] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 3055.399567] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Creating folder: 
Project (48343843622e41ec883090d331c45cd3). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 3055.402199] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98e31027-0eb3-4b48-9c83-b6963a497ff1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.412873] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Created folder: Project (48343843622e41ec883090d331c45cd3) in parent group-v351942. [ 3055.412873] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Creating folder: Instances. Parent ref: group-v352057. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 3055.413037] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e5cc37c-4da5-4193-8fc3-4ac0213009e2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.420873] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Created folder: Instances in parent group-v352057. [ 3055.421088] env[61964]: DEBUG oslo.service.loopingcall [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3055.422019] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 3055.422019] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af9717ed-98e0-42e5-bbd3-d2bf45267cf7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.437647] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b89a15-9737-4f12-a6cb-583674a18109 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.444624] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617a4770-d5d0-45d7-8703-178340c94062 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.448670] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 3055.448670] env[61964]: value = "task-1688800" [ 3055.448670] env[61964]: _type = "Task" [ 3055.448670] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3055.476629] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b369d66a-36f2-4e3e-8b34-aba1c8ff650c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.482674] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688800, 'name': CreateVM_Task} progress is 6%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3055.488779] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923e278b-28c0-4e05-bea0-719d2cf74cd4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.500171] env[61964]: DEBUG nova.compute.provider_tree [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3055.512099] env[61964]: DEBUG nova.scheduler.client.report [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3055.526697] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3055.527119] env[61964]: DEBUG nova.compute.manager [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 3055.560019] env[61964]: DEBUG nova.compute.utils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 3055.560700] env[61964]: DEBUG nova.compute.manager [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Not allocating networking since 'none' was specified. 
{{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 3055.570708] env[61964]: DEBUG nova.compute.manager [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 3055.647442] env[61964]: DEBUG nova.compute.manager [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Start spawning the instance on the hypervisor. {{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 3055.677534] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 3055.677784] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 3055.677938] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 3055.678133] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 3055.678279] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 3055.678426] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 3055.678627] env[61964]: DEBUG nova.virt.hardware [None 
req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 3055.678782] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 3055.678946] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 3055.679134] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 3055.679308] env[61964]: DEBUG nova.virt.hardware [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 3055.680206] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f7d437-24df-4d0c-8861-7e3091a44856 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.688664] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a169220-d52d-4fca-8cf0-4262d0159f2b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.438666] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Instance VIF info [] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 3056.444412] env[61964]: DEBUG oslo.service.loopingcall [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3056.449816] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 3056.450124] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquiring lock "44fed5d6-967a-43e6-bfcf-91b107c41c83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3056.450335] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Lock "44fed5d6-967a-43e6-bfcf-91b107c41c83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3056.451517] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47cab96e-ff87-424c-bb58-0ce0290db11f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.463634] env[61964]: DEBUG nova.compute.manager [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Starting instance... {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 3056.471258] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688800, 'name': CreateVM_Task, 'duration_secs': 0.740554} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3056.472314] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 3056.472507] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 3056.472507] env[61964]: value = "task-1688801" [ 3056.472507] env[61964]: _type = "Task" [ 3056.472507] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3056.472852] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3056.473012] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3056.473325] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3056.473579] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-873be6d6-9b93-4b03-af7a-7a26a85ba899 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.483168] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688801, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3056.483450] env[61964]: DEBUG oslo_vmware.api [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Waiting for the task: (returnval){ [ 3056.483450] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52727c12-9f9f-7a35-0d50-2192d6093af7" [ 3056.483450] env[61964]: _type = "Task" [ 3056.483450] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3056.492061] env[61964]: DEBUG oslo_vmware.api [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52727c12-9f9f-7a35-0d50-2192d6093af7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3056.513719] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3056.513956] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3056.515495] env[61964]: INFO nova.compute.claims [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 3056.692871] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b944ddeb-2c41-4eed-b707-886d4ebb35ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.700742] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37a5651-0253-4405-82d7-53c3ffaab387 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.730744] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dc3223-b11d-483d-a503-21780fb07107 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.737621] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407ea0a2-b859-449f-a13b-d2aa506b3a6c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.750454] env[61964]: DEBUG nova.compute.provider_tree [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3056.761924] env[61964]: DEBUG nova.scheduler.client.report [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3056.776337] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 
tempest-ServersTestJSON-184050650-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.261s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3056.776337] env[61964]: DEBUG nova.compute.manager [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Start building networks asynchronously for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 3056.820984] env[61964]: DEBUG nova.compute.utils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Using /dev/sd instead of None {{(pid=61964) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 3056.821848] env[61964]: DEBUG nova.compute.manager [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Allocating IP information in the background. {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 3056.822526] env[61964]: DEBUG nova.network.neutron [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] allocate_for_instance() {{(pid=61964) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 3056.831610] env[61964]: DEBUG nova.compute.manager [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Start building block device mappings for instance. {{(pid=61964) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 3056.891841] env[61964]: DEBUG nova.policy [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0c7f4136f6e45debfd37b68c85fdd03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aaf00de3673340f4931835ed9c7cb4c0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61964) authorize /opt/stack/nova/nova/policy.py:203}} [ 3056.895583] env[61964]: DEBUG nova.compute.manager [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Start spawning the instance on the hypervisor. 
{{(pid=61964) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 3056.922790] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-11-29T15:27:59Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-11-29T15:27:44Z,direct_url=,disk_format='vmdk',id=d9802c76-d112-4072-8a46-ca03ed36e004,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc053e04e4de43ce9a033f1ecfad3809',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-11-29T15:27:45Z,virtual_size=,visibility=), allow threads: False {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 3056.922963] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Flavor limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 3056.923130] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Image limits 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 3056.923315] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Flavor pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 3056.923460] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Image pref 0:0:0 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 3056.923605] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61964) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 3056.923807] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 3056.923966] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 3056.924146] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 
tempest-ServersTestJSON-184050650-project-member] Got 1 possible topologies {{(pid=61964) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 3056.924308] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 3056.924477] env[61964]: DEBUG nova.virt.hardware [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61964) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 3056.925453] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c755e4e2-a150-4165-af2f-08bbfcdd7b39 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.933790] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ee0746-61d3-4966-a87e-22d9be485c34 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.982631] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688801, 'name': CreateVM_Task, 'duration_secs': 0.293225} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3056.982798] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 3056.983203] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3056.992537] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3056.992774] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 3056.992982] env[61964]: DEBUG oslo_concurrency.lockutils [None req-92c83ba2-4f17-4ce4-81bf-fd714e2977d5 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3056.993206] env[61964]: DEBUG oslo_concurrency.lockutils [None 
req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3056.993506] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3056.993751] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6649281-5300-4107-9d7e-c6b944dc665c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.998741] env[61964]: DEBUG oslo_vmware.api [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Waiting for the task: (returnval){ [ 3056.998741] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5246180e-090f-0fb2-f4c9-346f6639c9f2" [ 3056.998741] env[61964]: _type = "Task" [ 3056.998741] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3057.006309] env[61964]: DEBUG oslo_vmware.api [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5246180e-090f-0fb2-f4c9-346f6639c9f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3057.221471] env[61964]: DEBUG nova.network.neutron [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Successfully created port: 118e6c06-0dd5-435d-97e5-5e1f59e466ea {{(pid=61964) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 3057.512108] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3057.512453] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 3057.512714] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33640e88-3638-437e-9f44-b97a0e261f34 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3057.815096] env[61964]: DEBUG nova.compute.manager [req-91a9598a-4ac5-4625-b375-11b541335e58 req-7cd7effd-d507-4143-8f78-fc3801b7c131 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Received event network-vif-plugged-118e6c06-0dd5-435d-97e5-5e1f59e466ea {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 3057.815330] env[61964]: DEBUG oslo_concurrency.lockutils [req-91a9598a-4ac5-4625-b375-11b541335e58 req-7cd7effd-d507-4143-8f78-fc3801b7c131 service nova] Acquiring lock "44fed5d6-967a-43e6-bfcf-91b107c41c83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3057.815538] env[61964]: DEBUG oslo_concurrency.lockutils [req-91a9598a-4ac5-4625-b375-11b541335e58 req-7cd7effd-d507-4143-8f78-fc3801b7c131 service nova] Lock "44fed5d6-967a-43e6-bfcf-91b107c41c83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3057.815701] env[61964]: DEBUG oslo_concurrency.lockutils [req-91a9598a-4ac5-4625-b375-11b541335e58 req-7cd7effd-d507-4143-8f78-fc3801b7c131 service nova] Lock "44fed5d6-967a-43e6-bfcf-91b107c41c83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3057.815862] env[61964]: DEBUG nova.compute.manager [req-91a9598a-4ac5-4625-b375-11b541335e58 req-7cd7effd-d507-4143-8f78-fc3801b7c131 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] No waiting events found dispatching 
network-vif-plugged-118e6c06-0dd5-435d-97e5-5e1f59e466ea {{(pid=61964) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 3057.817361] env[61964]: WARNING nova.compute.manager [req-91a9598a-4ac5-4625-b375-11b541335e58 req-7cd7effd-d507-4143-8f78-fc3801b7c131 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Received unexpected event network-vif-plugged-118e6c06-0dd5-435d-97e5-5e1f59e466ea for instance with vm_state building and task_state spawning. [ 3057.894293] env[61964]: DEBUG nova.network.neutron [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Successfully updated port: 118e6c06-0dd5-435d-97e5-5e1f59e466ea {{(pid=61964) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 3057.906318] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquiring lock "refresh_cache-44fed5d6-967a-43e6-bfcf-91b107c41c83" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3057.906457] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquired lock "refresh_cache-44fed5d6-967a-43e6-bfcf-91b107c41c83" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3057.906601] env[61964]: DEBUG nova.network.neutron [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Building network info cache for instance {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 3057.960115] env[61964]: DEBUG nova.network.neutron [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Instance cache missing network info. 
{{(pid=61964) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 3058.172516] env[61964]: DEBUG nova.network.neutron [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Updating instance_info_cache with network_info: [{"id": "118e6c06-0dd5-435d-97e5-5e1f59e466ea", "address": "fa:16:3e:af:49:e6", "network": {"id": "e44d54b6-8c3f-4f33-8990-d23c144700a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1010617637-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf00de3673340f4931835ed9c7cb4c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap118e6c06-0d", "ovs_interfaceid": "118e6c06-0dd5-435d-97e5-5e1f59e466ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3058.185525] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Releasing lock "refresh_cache-44fed5d6-967a-43e6-bfcf-91b107c41c83" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3058.185834] env[61964]: DEBUG nova.compute.manager [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Instance network_info: |[{"id": "118e6c06-0dd5-435d-97e5-5e1f59e466ea", "address": "fa:16:3e:af:49:e6", "network": {"id": "e44d54b6-8c3f-4f33-8990-d23c144700a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1010617637-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf00de3673340f4931835ed9c7cb4c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap118e6c06-0d", "ovs_interfaceid": "118e6c06-0dd5-435d-97e5-5e1f59e466ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61964) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 3058.186341] env[61964]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:49:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '118e6c06-0dd5-435d-97e5-5e1f59e466ea', 'vif_model': 'vmxnet3'}] {{(pid=61964) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 3058.194053] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Creating folder: Project (aaf00de3673340f4931835ed9c7cb4c0). Parent ref: group-v351942. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 3058.194977] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01827b4c-70c1-4416-b92d-ae865ac036bc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.206339] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Created folder: Project (aaf00de3673340f4931835ed9c7cb4c0) in parent group-v351942. [ 3058.206648] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Creating folder: Instances. Parent ref: group-v352061. {{(pid=61964) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 3058.206722] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-448d1332-4646-4088-be79-cdd24304362f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.215687] env[61964]: INFO nova.virt.vmwareapi.vm_util [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Created folder: Instances in parent group-v352061. [ 3058.215687] env[61964]: DEBUG oslo.service.loopingcall [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3058.215687] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Creating VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 3058.215687] env[61964]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d67ecba-2316-4f64-8d70-13e6578b0b58 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.234585] env[61964]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 3058.234585] env[61964]: value = "task-1688804" [ 3058.234585] env[61964]: _type = "Task" [ 3058.234585] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3058.241624] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688804, 'name': CreateVM_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3058.744744] env[61964]: DEBUG oslo_vmware.api [-] Task: {'id': task-1688804, 'name': CreateVM_Task, 'duration_secs': 0.281602} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3058.747079] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Created VM on the ESX host {{(pid=61964) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 3058.747079] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3058.747079] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3058.747079] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3058.747079] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92b05e49-12b7-4ad0-a219-ead4746c664f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.751046] env[61964]: DEBUG oslo_vmware.api [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Waiting for the task: (returnval){ [ 3058.751046] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5252c91d-ecfc-02f7-9d24-c4edbb122491" [ 3058.751046] env[61964]: _type = "Task" [ 3058.751046] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3058.760076] env[61964]: DEBUG oslo_vmware.api [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5252c91d-ecfc-02f7-9d24-c4edbb122491, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3059.261328] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3059.261636] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Processing image d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 3059.261890] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1de90fff-b7a5-45ce-94f1-01e43c2495d1 tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3059.843080] env[61964]: DEBUG nova.compute.manager [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Received event network-changed-118e6c06-0dd5-435d-97e5-5e1f59e466ea {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 3059.843376] env[61964]: DEBUG nova.compute.manager [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Refreshing instance network info cache due to event network-changed-118e6c06-0dd5-435d-97e5-5e1f59e466ea. {{(pid=61964) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 3059.843506] env[61964]: DEBUG oslo_concurrency.lockutils [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] Acquiring lock "refresh_cache-44fed5d6-967a-43e6-bfcf-91b107c41c83" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3059.843646] env[61964]: DEBUG oslo_concurrency.lockutils [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] Acquired lock "refresh_cache-44fed5d6-967a-43e6-bfcf-91b107c41c83" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3059.843800] env[61964]: DEBUG nova.network.neutron [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Refreshing network info cache for port 118e6c06-0dd5-435d-97e5-5e1f59e466ea {{(pid=61964) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 3060.077561] env[61964]: DEBUG nova.network.neutron [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Updated VIF entry in instance network info cache for port 118e6c06-0dd5-435d-97e5-5e1f59e466ea. 
{{(pid=61964) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 3060.077990] env[61964]: DEBUG nova.network.neutron [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Updating instance_info_cache with network_info: [{"id": "118e6c06-0dd5-435d-97e5-5e1f59e466ea", "address": "fa:16:3e:af:49:e6", "network": {"id": "e44d54b6-8c3f-4f33-8990-d23c144700a7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1010617637-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aaf00de3673340f4931835ed9c7cb4c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap118e6c06-0d", "ovs_interfaceid": "118e6c06-0dd5-435d-97e5-5e1f59e466ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3060.087483] env[61964]: DEBUG oslo_concurrency.lockutils [req-70de1dfa-844b-49d9-934c-98df6181a194 req-6b5bc0f9-00e1-409b-a042-ac6fae5eb450 service nova] Releasing lock "refresh_cache-44fed5d6-967a-43e6-bfcf-91b107c41c83" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3081.021605] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "a50141eb-d189-4970-9adc-10a25409b99a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3082.384299] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3082.384601] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 3082.384601] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 3082.404725] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.404903] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.404994] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.405138] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.405262] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.405381] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.405499] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.405614] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.405729] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3082.405847] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 3083.383614] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3088.385044] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3088.385044] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 3089.384653] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3090.384025] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3091.168472] env[61964]: WARNING oslo_vmware.rw_handles [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3091.168472] env[61964]: ERROR oslo_vmware.rw_handles [ 3091.168891] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3091.171320] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3091.171577] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Copying Virtual Disk [datastore1] vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/c4dc7edc-b8c0-4d66-871b-7a5af83d5326/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3091.171865] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb3968a6-cfdd-413b-9aaf-ad9db79dd14f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.180328] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 3091.180328] env[61964]: value = "task-1688805" [ 3091.180328] env[61964]: _type = "Task" [ 3091.180328] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3091.187694] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': task-1688805, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3091.692479] env[61964]: DEBUG oslo_vmware.exceptions [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3091.692903] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3091.693622] env[61964]: ERROR nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3091.693622] env[61964]: Faults: ['InvalidArgument'] [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Traceback (most recent call last): [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] yield resources [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self.driver.spawn(context, instance, image_meta, [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self._fetch_image_if_missing(context, vi) [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] image_cache(vi, tmp_image_ds_loc) [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] vm_util.copy_virtual_disk( [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] session._wait_for_task(vmdk_copy_task) [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] return self.wait_for_task(task_ref) [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] return evt.wait() [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] result = hub.switch() [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] return self.greenlet.switch() [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self.f(*self.args, **self.kw) [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] raise exceptions.translate_fault(task_info.error) [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Faults: ['InvalidArgument'] [ 3091.693622] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] [ 3091.694674] env[61964]: INFO nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Terminating instance [ 3091.695506] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3091.695725] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3091.695964] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-467ddec0-a3a1-41ab-8ca5-b0a227a32b1b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.698160] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3091.698355] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3091.699072] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2070445-e026-4da0-99a8-696abb46966b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.705690] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3091.705898] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd1a083c-dee4-4b92-aebe-4df407fb7a35 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.707910] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3091.708091] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3091.708978] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06b06947-5c42-444b-a3d0-c431ac2c55b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.713444] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Waiting for the task: (returnval){ [ 3091.713444] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528cdde0-ecdb-1bff-64e7-a64e4cd65436" [ 3091.713444] env[61964]: _type = "Task" [ 3091.713444] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3091.720250] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528cdde0-ecdb-1bff-64e7-a64e4cd65436, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3092.227638] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3092.228029] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Creating directory with path [datastore1] vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3092.228361] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-301b1394-54ae-4888-80b5-df01b2aa835a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.249962] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Created directory with path [datastore1] vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3092.250321] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Fetch image to [datastore1] vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3092.250576] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3092.251652] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c801427-3a9a-44c6-a468-30bed96eee89 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.260882] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a169f70-20ac-4bdd-b76a-bc31dac63136 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.273580] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e273f9df-ff52-4c28-ac99-f755e0db7caf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.322568] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed68621b-7ddb-4f27-911d-ddc80bdc888c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.328600] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-edfe62ed-c80c-4d7a-9ece-9f353d670aa4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.348076] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3092.396296] env[61964]: DEBUG oslo_vmware.rw_handles [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3092.455376] env[61964]: DEBUG oslo_vmware.rw_handles [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3092.455659] env[61964]: DEBUG oslo_vmware.rw_handles [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3093.383639] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3093.533668] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3093.533922] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3093.534116] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Deleting the datastore file [datastore1] 3e9d2629-bdab-4d87-8c22-1dc3589138ab {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3093.534383] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db94f95b-c82d-4177-a245-104473df0ca3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3093.540292] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 3093.540292] env[61964]: value = "task-1688807" [ 3093.540292] env[61964]: _type = "Task" [ 3093.540292] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3093.547988] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': task-1688807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3094.050589] env[61964]: DEBUG oslo_vmware.api [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': task-1688807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06722} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3094.050838] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3094.051032] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3094.051207] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3094.051423] env[61964]: INFO nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Took 2.35 seconds to destroy the instance on the hypervisor. [ 3094.053545] env[61964]: DEBUG nova.compute.claims [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3094.053719] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3094.053934] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3094.203217] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe83e48-4798-441b-9b71-c0b4fea7a2e2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.210663] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec32e1b-1c68-41f3-a3b0-8e1dd48b8bd5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.240339] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1130306-35b0-44ab-a177-5e3682647558 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.246820] env[61964]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448755e9-32a5-4423-a31a-729d8701c2da {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.259215] env[61964]: DEBUG nova.compute.provider_tree [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3094.267552] env[61964]: DEBUG nova.scheduler.client.report [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3094.281206] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.227s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3094.281721] env[61964]: ERROR nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3094.281721] env[61964]: Faults: ['InvalidArgument'] [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Traceback (most recent call last): [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self.driver.spawn(context, instance, image_meta, [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self._fetch_image_if_missing(context, vi) [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] image_cache(vi, tmp_image_ds_loc) [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] vm_util.copy_virtual_disk( [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] session._wait_for_task(vmdk_copy_task) [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] return self.wait_for_task(task_ref) [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] return evt.wait() [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] result = hub.switch() [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] return self.greenlet.switch() [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] self.f(*self.args, **self.kw) [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] raise exceptions.translate_fault(task_info.error) [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Faults: ['InvalidArgument'] [ 3094.281721] env[61964]: ERROR nova.compute.manager [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] [ 3094.282808] env[61964]: DEBUG nova.compute.utils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] VimFaultException {{(pid=61964) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3094.283982] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Build of instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab was re-scheduled: A specified parameter was not correct: fileType [ 3094.283982] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 3094.284511] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 3094.284700] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 3094.284874] env[61964]: DEBUG nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3094.285052] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3094.540369] env[61964]: DEBUG nova.network.neutron [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3094.552009] env[61964]: INFO nova.compute.manager [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Took 0.27 seconds to deallocate network for instance. 
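[editor's note] The ERROR tracebacks above all end in the same place: oslo_vmware.api waits on a vCenter task, and _poll_task raises exceptions.translate_fault(task_info.error) once CopyVirtualDisk_Task comes back with Faults: ['InvalidArgument'] ("A specified parameter was not correct: fileType"), which is what aborts the claim and triggers the re-schedule. As a reading aid only, the following is a minimal, self-contained Python sketch of that wait-and-translate pattern; it is not the oslo.vmware implementation, and the helper names (fake_poll, the dict shape passed to translate_fault, the poll interval) are invented for illustration.

# Sketch of the poll-until-done pattern visible in the tracebacks above (hypothetical code).
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (illustrative only)."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def translate_fault(error):
    # Hypothetical translation step: an unmatched fault name falls back to the generic
    # VimFaultException, mirroring the "Fault InvalidArgument not matched." DEBUG line.
    return VimFaultException(error.get("faults", []), error.get("localizedMessage", ""))


def wait_for_task(poll_fn, interval=0.5):
    """Poll a task until it reports success or error; raise a translated fault on error."""
    while True:
        info = poll_fn()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise translate_fault(info["error"])
        time.sleep(interval)  # task still queued/running ("progress is 0%" lines)


if __name__ == "__main__":
    # Fake poller that fails the way CopyVirtualDisk_Task fails in this log.
    def fake_poll():
        return {"state": "error",
                "error": {"faults": ["InvalidArgument"],
                          "localizedMessage": "A specified parameter was not correct: fileType"}}

    try:
        wait_for_task(fake_poll)
    except VimFaultException as exc:
        print("spawn would fail with:", exc, exc.fault_list)

In the real log the caller of this pattern is the image-cache copy step (_cache_sparse_image -> copy_virtual_disk -> _wait_for_task), so the fault surfaces as "Instance failed to spawn" and the build is re-scheduled.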
[ 3094.657148] env[61964]: INFO nova.scheduler.client.report [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Deleted allocations for instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab [ 3094.682320] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0421601b-0477-4c76-b7a8-4c0ff308d028 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 449.074s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3094.682320] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 253.107s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3094.682320] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3094.682320] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3094.682320] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3094.686464] env[61964]: INFO nova.compute.manager [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Terminating instance [ 3094.687981] env[61964]: DEBUG nova.compute.manager [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3094.687981] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3094.687981] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4c81822-1cc0-4bc0-99a8-0470d813e8a0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.697171] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ddadcc-0249-46ed-979b-b27560c9b69e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.723507] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3e9d2629-bdab-4d87-8c22-1dc3589138ab could not be found. [ 3094.723715] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3094.723891] env[61964]: INFO nova.compute.manager [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3094.724157] env[61964]: DEBUG oslo.service.loopingcall [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3094.724665] env[61964]: DEBUG nova.compute.manager [-] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3094.724763] env[61964]: DEBUG nova.network.neutron [-] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3094.746655] env[61964]: DEBUG nova.network.neutron [-] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3094.754703] env[61964]: INFO nova.compute.manager [-] [instance: 3e9d2629-bdab-4d87-8c22-1dc3589138ab] Took 0.03 seconds to deallocate network for instance. 
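[editor's note] The WARNING/DEBUG pair above ("Instance does not exist on backend: nova.exception.InstanceNotFound" followed immediately by "Instance destroyed" and network deallocation) shows the terminate path treating an already-missing VM as successfully destroyed so cleanup still runs. Below is a minimal, hypothetical Python sketch of that idempotent-teardown pattern; it is not the nova code, and destroy_instance, deallocate_network and FakeDriver are invented for illustration.

# Sketch of idempotent destroy: a missing backend VM is logged and tolerated (hypothetical).
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound (illustrative only)."""


def deallocate_network(instance_uuid):
    LOG.info("Deallocating network for instance %s", instance_uuid)


def destroy_instance(driver, instance_uuid):
    """Destroy the VM on the hypervisor, tolerating an instance that no longer exists."""
    try:
        driver.destroy(instance_uuid)
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    # Continue regardless, so network resources are still released.
    deallocate_network(instance_uuid)


class FakeDriver:
    def destroy(self, instance_uuid):
        raise InstanceNotFound(instance_uuid)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    destroy_instance(FakeDriver(), "3e9d2629-bdab-4d87-8c22-1dc3589138ab")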
[ 3094.836678] env[61964]: DEBUG oslo_concurrency.lockutils [None req-6e12ab17-ff63-4b1b-92d5-2ede4d306799 tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Lock "3e9d2629-bdab-4d87-8c22-1dc3589138ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.155s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3095.379182] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3096.383946] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3096.394838] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3096.395132] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3096.395342] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3096.395513] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 3096.396596] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dd6c0f-21f7-4def-a82a-4b3c34c32f60 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.405121] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d3f5b4-97b6-46ad-bdf0-f929bacead13 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.418374] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9d1bba-0262-4e0d-8a95-d6eacc76ec2f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.424517] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a20e72-9bd2-4b6a-800a-c19f203b3f68 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.453238] env[61964]: 
DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181348MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 3096.453382] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3096.453559] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3096.517559] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.517727] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.517854] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.517978] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a50141eb-d189-4970-9adc-10a25409b99a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.518145] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 23ae2618-093b-49a7-b3e4-3d8038e08cc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.518279] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bfe39bad-a73a-49a7-b79b-98ba8babbbcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.518402] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b4938cfb-bedf-4ecb-83fa-d172d6689414 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.518519] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 44fed5d6-967a-43e6-bfcf-91b107c41c83 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3096.518700] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 3096.518839] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 3096.615832] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a0b1b5-65fb-4374-bd53-91a38c108ba3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.623461] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8407dc32-70d2-4290-9403-e1392072127d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.653307] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad39b69b-fa41-4a77-b341-cd1a4718baa0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.660408] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1901f1-9a65-4629-9455-1692f2055118 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.673201] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3096.681165] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3096.694691] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 3096.694870] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.241s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3100.695279] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3110.379223] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3141.128068] env[61964]: WARNING oslo_vmware.rw_handles [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3141.128068] env[61964]: ERROR oslo_vmware.rw_handles [ 3141.128068] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3141.129412] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 
tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3141.129661] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Copying Virtual Disk [datastore1] vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/fa835fc7-5657-4990-b2f0-abdd17905bd0/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3141.129968] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcbdbbb9-4b9f-4aa6-82d1-64980c457975 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3141.137886] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Waiting for the task: (returnval){ [ 3141.137886] env[61964]: value = "task-1688808" [ 3141.137886] env[61964]: _type = "Task" [ 3141.137886] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3141.145521] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Task: {'id': task-1688808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3141.648545] env[61964]: DEBUG oslo_vmware.exceptions [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3141.648796] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3141.649357] env[61964]: ERROR nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3141.649357] env[61964]: Faults: ['InvalidArgument'] [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Traceback (most recent call last): [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] yield resources [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self.driver.spawn(context, instance, image_meta, [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self._fetch_image_if_missing(context, vi) [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] image_cache(vi, tmp_image_ds_loc) [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] vm_util.copy_virtual_disk( [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] session._wait_for_task(vmdk_copy_task) [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] return self.wait_for_task(task_ref) [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] return evt.wait() [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] result = hub.switch() [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] return self.greenlet.switch() [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self.f(*self.args, **self.kw) [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] raise exceptions.translate_fault(task_info.error) [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Faults: ['InvalidArgument'] [ 3141.649357] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] [ 3141.650282] env[61964]: INFO nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Terminating instance [ 3141.651284] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3141.651487] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3141.651735] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65558eba-2c67-45bf-a2fc-835da4200912 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3141.653939] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3141.654139] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3141.654847] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9961b9-660b-4ec2-9441-18fc8c2393d7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3141.661758] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3141.662013] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26f32b44-1f87-4a1c-b85e-cedcc60266c8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3141.664212] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3141.664385] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3141.665328] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e915aff-fd12-4a8a-9380-990ca20becee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3141.670027] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 3141.670027] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52692275-032c-c3a0-43be-44f0566603a6" [ 3141.670027] env[61964]: _type = "Task" [ 3141.670027] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3141.678242] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52692275-032c-c3a0-43be-44f0566603a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3142.007334] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3142.007505] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3142.007697] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Deleting the datastore file [datastore1] 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3142.007952] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ddf70db-0e12-41ef-914a-ba002a22cd7e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.013821] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Waiting for the task: (returnval){ [ 3142.013821] env[61964]: value = "task-1688810" [ 3142.013821] env[61964]: _type = "Task" [ 3142.013821] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3142.021468] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Task: {'id': task-1688810, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3142.179561] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3142.179855] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating directory with path [datastore1] vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3142.180085] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b73b3d3-0274-412e-978b-af0d265c351f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.190733] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Created directory with path [datastore1] vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3142.190914] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Fetch image to [datastore1] vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3142.191096] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3142.191791] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f8d8c7-3bcc-487f-a2d7-6577826ed812 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.197952] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f77c78-471c-4b26-9424-11f8ac5f042c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.206677] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd30bc68-f84b-4db7-abdd-1f171575f866 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.236130] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ab4989-5cd3-4ffe-9a5f-bdf1bfc97ba2 
{{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.241211] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6e286945-66c9-46a9-bc1a-a29aa461efb1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.263791] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3142.313997] env[61964]: DEBUG oslo_vmware.rw_handles [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3142.373267] env[61964]: DEBUG oslo_vmware.rw_handles [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3142.373471] env[61964]: DEBUG oslo_vmware.rw_handles [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3142.524106] env[61964]: DEBUG oslo_vmware.api [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Task: {'id': task-1688810, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065665} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3142.524382] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3142.524594] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3142.524784] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3142.524954] env[61964]: INFO nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Took 0.87 seconds to destroy the instance on the hypervisor. [ 3142.526989] env[61964]: DEBUG nova.compute.claims [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3142.527171] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3142.527379] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3142.657199] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c010113-20fb-4fd2-b648-f183385be147 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.664628] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1ac817-6837-4af6-99c3-ea7e436cf661 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.694946] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a97dcf-9888-4162-b06d-c7f0ad52cbe8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.702934] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d725f63-6a6e-4eff-9739-9a63abb2fd1d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3142.715930] env[61964]: DEBUG nova.compute.provider_tree [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3142.725030] env[61964]: DEBUG nova.scheduler.client.report [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3142.737316] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.210s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3142.737841] env[61964]: ERROR nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3142.737841] env[61964]: Faults: ['InvalidArgument'] [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Traceback (most recent call last): [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self.driver.spawn(context, instance, image_meta, [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self._fetch_image_if_missing(context, vi) [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing 
[ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] image_cache(vi, tmp_image_ds_loc) [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] vm_util.copy_virtual_disk( [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] session._wait_for_task(vmdk_copy_task) [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] return self.wait_for_task(task_ref) [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] return evt.wait() [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] result = hub.switch() [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] return self.greenlet.switch() [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] self.f(*self.args, **self.kw) [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] raise exceptions.translate_fault(task_info.error) [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Faults: ['InvalidArgument'] [ 3142.737841] env[61964]: ERROR nova.compute.manager [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] [ 3142.738600] env[61964]: DEBUG nova.compute.utils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] VimFaultException {{(pid=61964) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 3142.740014] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Build of instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 was re-scheduled: A specified parameter was not correct: fileType [ 3142.740014] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 3142.740409] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 3142.740591] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 3142.740790] env[61964]: DEBUG nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3142.741027] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3143.090289] env[61964]: DEBUG nova.network.neutron [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3143.102960] env[61964]: INFO nova.compute.manager [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Took 0.36 seconds to deallocate network for instance. 
[ 3143.195768] env[61964]: INFO nova.scheduler.client.report [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Deleted allocations for instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 [ 3143.218368] env[61964]: DEBUG oslo_concurrency.lockutils [None req-d0882bf3-a3e7-4899-b738-b040230ca3e1 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 485.253s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3143.218676] env[61964]: DEBUG oslo_concurrency.lockutils [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 289.148s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3143.218902] env[61964]: DEBUG oslo_concurrency.lockutils [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Acquiring lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3143.219175] env[61964]: DEBUG oslo_concurrency.lockutils [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3143.219364] env[61964]: DEBUG oslo_concurrency.lockutils [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3143.221401] env[61964]: INFO nova.compute.manager [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Terminating instance [ 3143.223473] env[61964]: DEBUG nova.compute.manager [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3143.223671] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3143.224207] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d8e23e8-369f-409e-9ca6-42188d381b44 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3143.233187] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e80ae5-e0d4-4f80-a491-5a021399daf9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3143.261554] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6fe43dac-8e0d-4045-a7d7-5ed685c67a02 could not be found. [ 3143.261803] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3143.261978] env[61964]: INFO nova.compute.manager [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3143.262226] env[61964]: DEBUG oslo.service.loopingcall [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3143.262472] env[61964]: DEBUG nova.compute.manager [-] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3143.262519] env[61964]: DEBUG nova.network.neutron [-] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3143.289931] env[61964]: DEBUG nova.network.neutron [-] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3143.297552] env[61964]: INFO nova.compute.manager [-] [instance: 6fe43dac-8e0d-4045-a7d7-5ed685c67a02] Took 0.03 seconds to deallocate network for instance. 
[ 3143.386275] env[61964]: DEBUG oslo_concurrency.lockutils [None req-39b1c29a-2e93-4eb7-b635-f0e1dd5ef7d9 tempest-ServerActionsTestOtherA-1754715348 tempest-ServerActionsTestOtherA-1754715348-project-member] Lock "6fe43dac-8e0d-4045-a7d7-5ed685c67a02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.167s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3144.384485] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3144.384910] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Starting heal instance info cache {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 3144.384910] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Rebuilding the list of instances to heal {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 3144.402107] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3144.402276] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3144.402442] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3144.402620] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3144.402855] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: bfe39bad-a73a-49a7-b79b-98ba8babbbcb] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3144.403052] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: b4938cfb-bedf-4ecb-83fa-d172d6689414] Skipping network cache update for instance because it is Building. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3144.403800] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] [instance: 44fed5d6-967a-43e6-bfcf-91b107c41c83] Skipping network cache update for instance because it is Building. 
{{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 3144.403800] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Didn't find any instances for network info cache update. {{(pid=61964) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 3144.403902] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3148.383607] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3148.384232] env[61964]: DEBUG nova.compute.manager [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61964) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 3150.384676] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3151.383663] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3153.383947] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3157.379238] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3158.384553] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3158.397037] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3158.397037] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3158.397037] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3158.397037] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61964) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 3158.398117] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860b6238-d1a3-4c70-8a60-cd90ab9a9455 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.406975] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2c310a-c394-4844-8161-7de3fa003208 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.420873] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1542d05-838e-4be0-abae-d5f38efaea9b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.426977] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c638cd0-ee3a-4dce-b960-fd51d0db68cb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.456283] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181336MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=61964) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 3158.456444] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3158.456619] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3158.516371] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3158.516524] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3158.516651] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance a50141eb-d189-4970-9adc-10a25409b99a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3158.516770] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 23ae2618-093b-49a7-b3e4-3d8038e08cc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3158.516889] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance bfe39bad-a73a-49a7-b79b-98ba8babbbcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3158.517018] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance b4938cfb-bedf-4ecb-83fa-d172d6689414 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3158.517139] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Instance 44fed5d6-967a-43e6-bfcf-91b107c41c83 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61964) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 3158.517320] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 3158.517456] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61964) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 3158.600586] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9fc4fa-7708-45f5-8ca7-3975910cf133 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.608098] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495a8255-8abf-4872-b635-48b413988928 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.637010] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc630f94-83ab-4375-b5e7-0b2ebfca7597 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.643930] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640593ec-80ab-4510-ad0c-f3684a3fa947 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3158.657228] env[61964]: DEBUG nova.compute.provider_tree [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3158.665166] env[61964]: DEBUG nova.scheduler.client.report [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3158.677566] env[61964]: DEBUG nova.compute.resource_tracker [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61964) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 3158.677744] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.221s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3160.676995] env[61964]: DEBUG oslo_service.periodic_task [None 
req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3176.385144] env[61964]: DEBUG oslo_service.periodic_task [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Running periodic task ComputeManager._run_image_cache_manager_pass {{(pid=61964) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 3176.385480] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3176.385994] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3176.386371] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3176.386548] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3176.386829] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3176.387091] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3176.408785] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1b5dd1-fd50-420d-af6f-aee1cf9e44e8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3176.418785] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254144b6-caa5-41db-b84f-2263dca73ca3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3176.444924] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-968562c5-fb8a-4b42-ac53-608687a7c4cb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3176.449508] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the 
task: (returnval){ [ 3176.449508] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524dd233-6e97-bb9d-f215-3986f09b5abc" [ 3176.449508] env[61964]: _type = "Task" [ 3176.449508] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3176.456949] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524dd233-6e97-bb9d-f215-3986f09b5abc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3177.024301] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524dd233-6e97-bb9d-f215-3986f09b5abc, 'name': SearchDatastore_Task, 'duration_secs': 0.292578} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3177.024878] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1fffd2cd-a955-48a5-8644-c3dbfb21b71d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3177.025043] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1fffd2cd-a955-48a5-8644-c3dbfb21b71d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3177.025343] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1fffd2cd-a955-48a5-8644-c3dbfb21b71d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3177.025793] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d024fd02-1891-4adb-89a0-2f6ea69c9f01 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3177.030956] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3177.030956] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d47452-422b-342a-d646-e8ac80c85ed2" [ 3177.030956] env[61964]: _type = "Task" [ 3177.030956] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3177.038583] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d47452-422b-342a-d646-e8ac80c85ed2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3177.541665] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d47452-422b-342a-d646-e8ac80c85ed2, 'name': SearchDatastore_Task, 'duration_secs': 0.010275} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3177.542559] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/1fffd2cd-a955-48a5-8644-c3dbfb21b71d is no longer used. Deleting! [ 3177.542710] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1fffd2cd-a955-48a5-8644-c3dbfb21b71d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3177.542976] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3ed3ffe-f50d-4bfb-8820-6277f76b04da {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3177.551073] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3177.551073] env[61964]: value = "task-1688811" [ 3177.551073] env[61964]: _type = "Task" [ 3177.551073] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3177.558083] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688811, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3178.061842] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104316} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3178.062117] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3178.062276] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1fffd2cd-a955-48a5-8644-c3dbfb21b71d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3178.062495] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3178.062613] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3178.062927] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3178.063206] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cf05c5f-4a2f-47a8-a341-85be5db007b7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3178.067213] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3178.067213] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52021581-62e9-7fa9-27c6-3d9fd9f3dae3" [ 3178.067213] env[61964]: _type = "Task" [ 3178.067213] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3178.074296] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52021581-62e9-7fa9-27c6-3d9fd9f3dae3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3178.577889] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52021581-62e9-7fa9-27c6-3d9fd9f3dae3, 'name': SearchDatastore_Task, 'duration_secs': 0.008972} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3178.598605] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10 is no longer used. Deleting! 
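
The wait_for_task / _poll_task records above all follow the same shape: a vSphere task is submitted, the caller logs "Waiting for the task", polls while progress sits at 0%, and finishes once the task reports success, recording the elapsed time as duration_secs. The sketch below is a minimal stand-in for that polling loop, not oslo.vmware's actual implementation; fetch_task_state, the poll interval, and the timeout are assumptions.

    import time

    def wait_for_task(fetch_task_state, poll_interval=0.5, timeout=300.0):
        """Poll a task until it reports success; return the elapsed seconds.

        fetch_task_state is a hypothetical callable returning a
        (state, detail) tuple such as ('running', 0), ('success', None)
        or ('error', 'fault message'); the real code reads the task's
        state from vCenter instead.
        """
        start = time.monotonic()
        while True:
            state, detail = fetch_task_state()
            if state == 'success':
                return time.monotonic() - start   # shows up as duration_secs above
            if state == 'error':
                raise RuntimeError('task failed: %s' % detail)
            if time.monotonic() - start > timeout:
                raise TimeoutError('task still running after %ss' % timeout)
            time.sleep(poll_interval)             # analogous to a task poll interval
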
[ 3178.598605] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3178.598605] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75c4442a-39a1-4f5b-9542-a00df99009b6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3178.598605] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3178.598605] env[61964]: value = "task-1688812" [ 3178.598605] env[61964]: _type = "Task" [ 3178.598605] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3178.598605] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688812, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3179.095941] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688812, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101771} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3179.096184] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3179.096363] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3179.096588] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f2bad9b1-84fd-4a00-9c51-3a76f0ec81f8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3179.096712] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/f2bad9b1-84fd-4a00-9c51-3a76f0ec81f8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3179.097055] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2bad9b1-84fd-4a00-9c51-3a76f0ec81f8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3179.097330] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d15870e3-1f1f-4ba3-b987-90fe5406301d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3179.101537] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3179.101537] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52845d6e-f171-816d-de97-037a6fb3a4a3" [ 3179.101537] env[61964]: _type = "Task" [ 3179.101537] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3179.108640] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52845d6e-f171-816d-de97-037a6fb3a4a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3179.611944] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52845d6e-f171-816d-de97-037a6fb3a4a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009637} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3179.612267] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/f2bad9b1-84fd-4a00-9c51-3a76f0ec81f8 is no longer used. Deleting! [ 3179.612411] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f2bad9b1-84fd-4a00-9c51-3a76f0ec81f8 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3179.612672] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e38f31f-844a-4251-9856-fc579d0e0906 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3179.618435] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3179.618435] env[61964]: value = "task-1688813" [ 3179.618435] env[61964]: _type = "Task" [ 3179.618435] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3179.625363] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688813, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3180.129210] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688813, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1275} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3180.129419] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3180.129597] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/f2bad9b1-84fd-4a00-9c51-3a76f0ec81f8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3180.129816] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3180.129937] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3180.130286] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3180.130557] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7447361-5443-4b90-b509-fd775ad82669 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3180.134980] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3180.134980] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52205972-9a5f-1080-506f-90428043b574" [ 3180.134980] env[61964]: _type = "Task" [ 3180.134980] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3180.142663] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52205972-9a5f-1080-506f-90428043b574, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3180.645549] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52205972-9a5f-1080-506f-90428043b574, 'name': SearchDatastore_Task, 'duration_secs': 0.0094} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3180.645820] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827/ts-2024-11-29-16-08-59 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3180.646097] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44586c65-39e3-450d-a21f-b46cd37576fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3180.658612] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827/ts-2024-11-29-16-08-59 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3180.658760] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image b6088630-8757-4bb1-b798-c7ed74c1d827 is no longer used by this node. Pending deletion! [ 3180.658948] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3180.659180] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3180.659298] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3180.659597] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3180.659843] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23f48cda-039b-4c71-8fcf-db02e771cdbc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3180.664154] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3180.664154] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524322c7-6d52-7bcb-b4a6-5d0646d398cd" [ 3180.664154] env[61964]: _type = "Task" [ 3180.664154] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3180.671293] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524322c7-6d52-7bcb-b4a6-5d0646d398cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3181.174355] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524322c7-6d52-7bcb-b4a6-5d0646d398cd, 'name': SearchDatastore_Task, 'duration_secs': 0.008197} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3181.174632] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569/ts-2024-11-29-16-09-00 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3181.174888] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe137574-35df-4a1d-929c-a1c17dba61fb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3181.187702] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569/ts-2024-11-29-16-09-00 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3181.187900] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image ce746a6a-2f86-4112-b2ee-e92df63d0569 is no longer used by this node. Pending deletion! [ 3181.187985] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3181.188203] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3181.188322] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3181.188626] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3181.188866] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-704d0497-1ae5-46a0-b97d-8ea5e40da891 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3181.192832] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3181.192832] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526ae8ff-23f9-5298-3d3b-fab563f73c8c" [ 3181.192832] env[61964]: _type = "Task" [ 3181.192832] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3181.199990] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526ae8ff-23f9-5298-3d3b-fab563f73c8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3181.703390] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]526ae8ff-23f9-5298-3d3b-fab563f73c8c, 'name': SearchDatastore_Task, 'duration_secs': 0.008636} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3181.703677] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e/ts-2024-11-29-16-09-00 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3181.703949] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91475720-0b71-4571-b5f9-53fea5b2b883 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3181.716636] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e/ts-2024-11-29-16-09-00 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3181.716802] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 1e0860e2-4aa5-48a7-bc29-49ea8e6f038e is no longer used by this node. Pending deletion! 
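
Two outcomes are visible for unused cache entries in this pass: some are deleted outright, while others (the entries just above) only get a ts-YYYY-MM-DD-HH-MM-SS marker directory and are logged as "Pending deletion!", so a later pass can remove them once they have stayed unused long enough. The sketch below illustrates that marker-based aging pattern on a local directory; it is an illustration only, not the nova.virt.vmwareapi.imagecache code, and MAX_AGE plus the local paths are assumptions.

    import shutil
    import time
    from pathlib import Path

    TS_FORMAT = 'ts-%Y-%m-%d-%H-%M-%S'   # matches the marker names in the log
    MAX_AGE = 24 * 3600                  # assumed aging threshold, in seconds

    def age_cached_image(image_dir: Path, in_use: bool) -> None:
        """Timestamp an unused cache entry, delete it once the marker is old."""
        markers = sorted(image_dir.glob('ts-*'))
        if in_use:
            for marker in markers:       # entry is referenced again: drop the markers
                marker.rmdir()
            return
        if not markers:                  # first pass that finds it unused: "Pending deletion!"
            (image_dir / time.strftime(TS_FORMAT)).mkdir()
            return
        marked_at = time.mktime(time.strptime(markers[0].name, TS_FORMAT))
        if time.time() - marked_at > MAX_AGE:
            shutil.rmtree(image_dir)     # unused for long enough: remove the whole entry

    # e.g. age_cached_image(Path('/tmp/devstack-image-cache_base/<image id>'), in_use=False)
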
[ 3181.716943] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3181.717200] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/92e742dd-b0e5-450e-aa05-c37289c2a33d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3181.717334] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/92e742dd-b0e5-450e-aa05-c37289c2a33d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3181.717680] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/92e742dd-b0e5-450e-aa05-c37289c2a33d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3181.717952] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baa83632-0a0f-447f-8ee3-8909ba4090e0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3181.722362] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3181.722362] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520c66a6-01c2-7ea9-5daa-b1a16144d1c6" [ 3181.722362] env[61964]: _type = "Task" [ 3181.722362] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3181.729532] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520c66a6-01c2-7ea9-5daa-b1a16144d1c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3182.232459] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520c66a6-01c2-7ea9-5daa-b1a16144d1c6, 'name': SearchDatastore_Task, 'duration_secs': 0.014973} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3182.232763] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/92e742dd-b0e5-450e-aa05-c37289c2a33d is no longer used. Deleting! 
[ 3182.232907] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/92e742dd-b0e5-450e-aa05-c37289c2a33d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3182.233170] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dab97749-e1ee-480b-9f01-1671aadba2e1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3182.238785] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3182.238785] env[61964]: value = "task-1688814" [ 3182.238785] env[61964]: _type = "Task" [ 3182.238785] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3182.246301] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688814, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3182.748749] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688814, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133436} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3182.749072] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3182.749173] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/92e742dd-b0e5-450e-aa05-c37289c2a33d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3182.749387] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1d50ce73-9d0f-41be-b09b-a02b915e071b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3182.749503] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1d50ce73-9d0f-41be-b09b-a02b915e071b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3182.749798] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1d50ce73-9d0f-41be-b09b-a02b915e071b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3182.750067] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e6c3737-1b81-4ebf-81ae-ff0fdaadcdff {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3182.754291] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3182.754291] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52514460-8c4a-96e7-68eb-5ccdff8b839f" [ 3182.754291] env[61964]: _type = "Task" [ 3182.754291] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3182.761279] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52514460-8c4a-96e7-68eb-5ccdff8b839f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3183.265046] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52514460-8c4a-96e7-68eb-5ccdff8b839f, 'name': SearchDatastore_Task, 'duration_secs': 0.009712} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3183.265360] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/1d50ce73-9d0f-41be-b09b-a02b915e071b is no longer used. Deleting! [ 3183.265500] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1d50ce73-9d0f-41be-b09b-a02b915e071b {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3183.265754] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-041077d5-2643-4f32-b9de-9d83d611b858 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3183.271032] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3183.271032] env[61964]: value = "task-1688815" [ 3183.271032] env[61964]: _type = "Task" [ 3183.271032] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3183.278095] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3183.782053] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124571} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3183.782053] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3183.782053] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1d50ce73-9d0f-41be-b09b-a02b915e071b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3183.782053] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/34704523-687b-4a2e-bf3c-e68bb6b9ad79" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3183.782053] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/34704523-687b-4a2e-bf3c-e68bb6b9ad79" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3183.782483] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/34704523-687b-4a2e-bf3c-e68bb6b9ad79" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3183.782573] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd2fb0bc-c330-4029-9dbd-a142503ff6c8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3183.787295] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3183.787295] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52aee3fa-95f3-e17c-e905-4ab9ebbc4491" [ 3183.787295] env[61964]: _type = "Task" [ 3183.787295] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3183.795013] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52aee3fa-95f3-e17c-e905-4ab9ebbc4491, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3184.297841] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52aee3fa-95f3-e17c-e905-4ab9ebbc4491, 'name': SearchDatastore_Task, 'duration_secs': 0.410941} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3184.298184] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/34704523-687b-4a2e-bf3c-e68bb6b9ad79 is no longer used. Deleting! 
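
Each cache entry above is handled under its own lock name, "[datastore1] devstack-image-cache_base/<image id>": the lock and an external semaphore are acquired before the SearchDatastore and DeleteDatastoreFile calls and released afterwards, so the periodic task and concurrent spawns never race on the same folder. Below is a hedged sketch of that pattern using lockutils.lock from oslo.concurrency (the module these lock/release lines come from); check_in_use, delete_entry and the lock_path are assumptions, standing in for the real datastore-browser and file-manager calls.

    from oslo_concurrency import lockutils

    def reap_cache_entry(datastore, image_id, check_in_use, delete_entry):
        """Check one image-cache folder and delete it if nothing references it."""
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        # external=True also takes a file-based lock so that other processes on
        # the host are serialized too; the lock_path here is an assumed example.
        with lockutils.lock(lock_name, external=True, lock_path='/var/lock/nova'):
            if check_in_use(image_id):   # hypothetical stand-in for SearchDatastore_Task
                return False
            delete_entry(image_id)       # "Image ... is no longer used. Deleting!"
            return True
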
[ 3184.298333] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/34704523-687b-4a2e-bf3c-e68bb6b9ad79 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3184.298584] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-670c2583-059a-485a-9a09-53c757468156 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3184.305149] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3184.305149] env[61964]: value = "task-1688816" [ 3184.305149] env[61964]: _type = "Task" [ 3184.305149] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3184.312397] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3184.814499] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3185.315824] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3185.816971] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.189522} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3185.817345] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3185.817424] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/34704523-687b-4a2e-bf3c-e68bb6b9ad79" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3185.817567] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b272d223-8ccf-4596-b0ec-08bfc31cea5b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3185.817686] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/b272d223-8ccf-4596-b0ec-08bfc31cea5b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3185.817996] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b272d223-8ccf-4596-b0ec-08bfc31cea5b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3185.818273] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c95dfff-ffd0-43be-b30f-e18a33f4b4ec {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3185.822442] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3185.822442] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5a93b-b475-fc7a-03bd-9fb6179e5863" [ 3185.822442] env[61964]: _type = "Task" [ 3185.822442] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3185.829808] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5a93b-b475-fc7a-03bd-9fb6179e5863, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3186.333177] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5a93b-b475-fc7a-03bd-9fb6179e5863, 'name': SearchDatastore_Task, 'duration_secs': 0.008704} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3186.333408] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/b272d223-8ccf-4596-b0ec-08bfc31cea5b is no longer used. Deleting! 
[ 3186.333553] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b272d223-8ccf-4596-b0ec-08bfc31cea5b {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3186.333811] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b4b98e5-220d-4518-8af8-614ee0064e03 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3186.339429] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3186.339429] env[61964]: value = "task-1688817" [ 3186.339429] env[61964]: _type = "Task" [ 3186.339429] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3186.346924] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688817, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3186.848951] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103651} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3186.849282] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3186.849396] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/b272d223-8ccf-4596-b0ec-08bfc31cea5b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3186.849620] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3186.849735] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3186.850063] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3186.850338] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d993d59-38c7-4065-9fcb-dc8f02468de3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3186.854549] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3186.854549] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521b587b-d592-192d-6c3c-796eb54d38fe" [ 3186.854549] env[61964]: _type = "Task" [ 3186.854549] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3186.861641] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521b587b-d592-192d-6c3c-796eb54d38fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3187.365083] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521b587b-d592-192d-6c3c-796eb54d38fe, 'name': SearchDatastore_Task, 'duration_secs': 0.007945} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3187.365360] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4/ts-2024-11-29-16-09-06 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3187.365647] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ada6bd3-c02c-42a1-893c-3bddc9028047 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3187.377702] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4/ts-2024-11-29-16-09-06 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3187.377881] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image c1d587ae-67a3-4f10-8d61-599b3786d3a4 is no longer used by this node. Pending deletion! 
[ 3187.378021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3187.378255] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/507154ba-3bcc-412a-923d-8a51c1202d3d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3187.378413] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/507154ba-3bcc-412a-923d-8a51c1202d3d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3187.378746] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/507154ba-3bcc-412a-923d-8a51c1202d3d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3187.379029] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eebdc7a5-f5d6-42cf-b104-a55d46272216 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3187.383809] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3187.383809] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527122b7-aea5-3bc1-0488-0aee4269e821" [ 3187.383809] env[61964]: _type = "Task" [ 3187.383809] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3187.392161] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527122b7-aea5-3bc1-0488-0aee4269e821, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3187.894403] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527122b7-aea5-3bc1-0488-0aee4269e821, 'name': SearchDatastore_Task, 'duration_secs': 0.00907} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3187.894797] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/507154ba-3bcc-412a-923d-8a51c1202d3d is no longer used. Deleting! 
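
Every entry in this pass goes through the same decision: the folders found under devstack-image-cache_base are compared against the images still referenced on this host, and anything unreferenced becomes a cleanup candidate that is then either deleted immediately or, as with the "Pending deletion!" entries above, only timestamped for a later pass. The toy planning step below shows just that set comparison; the image ids in the example are made up, and the keep/reap split is all it decides.

    def plan_cache_cleanup(cached_ids, used_ids):
        """Split cached image ids into ones to keep and reap candidates."""
        cached_ids, used_ids = set(cached_ids), set(used_ids)
        return cached_ids & used_ids, cached_ids - used_ids

    # Example with made-up ids: anything no instance references is a candidate.
    keep, reap = plan_cache_cleanup({'img-a', 'img-b', 'img-c'}, {'img-b'})
    assert keep == {'img-b'} and reap == {'img-a', 'img-c'}
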
[ 3187.894842] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/507154ba-3bcc-412a-923d-8a51c1202d3d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3187.895129] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-959ee75b-fc7c-4056-8bc5-54f28b525ae9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3187.901670] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3187.901670] env[61964]: value = "task-1688818" [ 3187.901670] env[61964]: _type = "Task" [ 3187.901670] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3187.909847] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3188.174028] env[61964]: WARNING oslo_vmware.rw_handles [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3188.174028] env[61964]: ERROR oslo_vmware.rw_handles [ 3188.174028] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3188.175982] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Caching image 
{{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3188.176280] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Copying Virtual Disk [datastore1] vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/66a1bff4-85df-478a-a341-e5f8704d7802/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3188.176584] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37f0016d-5f18-4b1c-8dfc-16645eb8c2c7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3188.184872] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 3188.184872] env[61964]: value = "task-1688819" [ 3188.184872] env[61964]: _type = "Task" [ 3188.184872] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3188.192814] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3188.415067] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111475} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3188.415395] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3188.415636] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/507154ba-3bcc-412a-923d-8a51c1202d3d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3188.415934] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/aff5c776-fc9d-490d-9235-8c81c83328e9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3188.416119] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/aff5c776-fc9d-490d-9235-8c81c83328e9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3188.416558] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/aff5c776-fc9d-490d-9235-8c81c83328e9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3188.416915] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e828ff11-2494-4ee6-b3b6-956333695513 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3188.422547] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3188.422547] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52304d33-2188-8db9-ff7d-104ee9c19971" [ 3188.422547] env[61964]: _type = "Task" [ 3188.422547] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3188.430850] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52304d33-2188-8db9-ff7d-104ee9c19971, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3188.695824] env[61964]: DEBUG oslo_vmware.exceptions [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3188.696085] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3188.697239] env[61964]: ERROR nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3188.697239] env[61964]: Faults: ['InvalidArgument'] [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Traceback (most recent call last): [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] yield resources [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self.driver.spawn(context, instance, image_meta, [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self._fetch_image_if_missing(context, vi) [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] image_cache(vi, tmp_image_ds_loc) [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] vm_util.copy_virtual_disk( [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] session._wait_for_task(vmdk_copy_task) [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] return self.wait_for_task(task_ref) [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] return evt.wait() [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] result = hub.switch() [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] return self.greenlet.switch() [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self.f(*self.args, **self.kw) [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] raise exceptions.translate_fault(task_info.error) [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Faults: ['InvalidArgument'] [ 3188.697239] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] [ 3188.697239] env[61964]: INFO nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Terminating instance [ 3188.698639] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3188.698848] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3188.699157] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-826b0bd8-39d2-4635-b979-387c01045033 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3188.701272] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3188.701464] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3188.702201] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbf29e2-0b4d-44b0-bfa5-a50d0bba18e1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3188.708824] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3188.709043] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-744dde37-2a11-4268-a14b-ac2e36c844b0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3188.711134] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3188.711317] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3188.712245] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a872bcf4-b1ed-4098-9e56-fce0aa708573 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3188.716518] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Waiting for the task: (returnval){ [ 3188.716518] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f8747b-6ecb-b8a6-e03f-db601489cc58" [ 3188.716518] env[61964]: _type = "Task" [ 3188.716518] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3188.729478] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f8747b-6ecb-b8a6-e03f-db601489cc58, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3188.933329] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52304d33-2188-8db9-ff7d-104ee9c19971, 'name': SearchDatastore_Task, 'duration_secs': 0.009545} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3188.933700] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/aff5c776-fc9d-490d-9235-8c81c83328e9 is no longer used. Deleting! [ 3188.933741] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/aff5c776-fc9d-490d-9235-8c81c83328e9 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3188.933982] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc66c844-02a8-4c50-a684-045cb41bce7e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3188.940714] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3188.940714] env[61964]: value = "task-1688821" [ 3188.940714] env[61964]: _type = "Task" [ 3188.940714] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3188.948620] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688821, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3189.226020] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3189.226312] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Creating directory with path [datastore1] vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3189.226552] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8682eb3-ceae-4462-b448-9923541057c1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3189.238145] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Created directory with path [datastore1] vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3189.238345] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Fetch image to [datastore1] vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3189.238515] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3189.239316] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290fe379-a28c-4778-9b01-a026cbaff316 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3189.245686] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0df7434-8340-405c-8e34-78203dde37b5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3189.254438] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43ce638-1037-4897-bfe1-d9e44b7c787a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3189.287467] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b2692c-3cfa-4f8c-a496-acc58a70ee71 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3189.290895] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-273d3e86-3909-44f6-bbd7-f48e99fa3d21 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3189.315405] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3189.362919] env[61964]: DEBUG oslo_vmware.rw_handles [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3189.423038] env[61964]: DEBUG oslo_vmware.rw_handles [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3189.423038] env[61964]: DEBUG oslo_vmware.rw_handles [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3189.449905] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3189.450078] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3189.450252] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/aff5c776-fc9d-490d-9235-8c81c83328e9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3189.450468] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3189.450587] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3189.450894] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3189.451184] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-303da3d2-5f67-4f9a-b97b-56ebb7c6829c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3189.455379] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3189.455379] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be7a4f-c46d-c0d7-e67e-80a4398cdf6e" [ 3189.455379] env[61964]: _type = "Task" [ 3189.455379] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3189.462428] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be7a4f-c46d-c0d7-e67e-80a4398cdf6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3189.970134] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be7a4f-c46d-c0d7-e67e-80a4398cdf6e, 'name': SearchDatastore_Task, 'duration_secs': 0.008013} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3189.970498] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b/ts-2024-11-29-16-09-08 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3189.970827] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a79c95b1-82f6-4a4f-89a4-db8be971ea05 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3190.015504] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b/ts-2024-11-29-16-09-08 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3190.015696] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image e9d7a45b-6ee0-421f-82fb-db2ef8922c9b is no longer used by this node. Pending deletion! [ 3190.015821] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3190.016057] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/84f993b9-78f0-4b87-b55b-1a95b4904270" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3190.016191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/84f993b9-78f0-4b87-b55b-1a95b4904270" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3190.016527] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/84f993b9-78f0-4b87-b55b-1a95b4904270" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3190.016819] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33116893-212f-4e5b-9117-6ff30b871617 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3190.021628] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3190.021628] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5282649a-ec18-eff8-9df3-9cdb84a09456" [ 3190.021628] env[61964]: _type = "Task" [ 3190.021628] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3190.030090] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5282649a-ec18-eff8-9df3-9cdb84a09456, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3190.533427] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5282649a-ec18-eff8-9df3-9cdb84a09456, 'name': SearchDatastore_Task, 'duration_secs': 0.07103} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3190.533754] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/84f993b9-78f0-4b87-b55b-1a95b4904270" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3190.533973] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3190.534113] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3190.534420] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3190.534681] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72fd1fa-5568-432a-914e-525b2f7dd4c2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3190.538917] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3190.538917] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52873847-a5b8-a95d-c83b-f58d1e67bf27" [ 3190.538917] env[61964]: _type = "Task" [ 3190.538917] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3190.545871] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52873847-a5b8-a95d-c83b-f58d1e67bf27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3190.606699] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3190.606930] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3190.607128] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Deleting the datastore file [datastore1] e717f146-dd0a-4285-810f-8f9cc7ffaa6e {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3190.607385] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed736a06-2b46-4f0f-81fc-2fd7a824449d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3190.613776] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for the task: (returnval){ [ 3190.613776] env[61964]: value = "task-1688822" [ 3190.613776] env[61964]: _type = "Task" [ 3190.613776] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3190.621319] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688822, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3191.048893] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52873847-a5b8-a95d-c83b-f58d1e67bf27, 'name': SearchDatastore_Task, 'duration_secs': 0.113953} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3191.049278] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3191.049460] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/73eb3d41-0b31-4e9c-8df6-ade235fce138" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3191.049583] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/73eb3d41-0b31-4e9c-8df6-ade235fce138" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3191.049886] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/73eb3d41-0b31-4e9c-8df6-ade235fce138" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3191.050160] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18f494b7-f853-4bdb-936d-5b6d15225ff2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3191.054161] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3191.054161] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5203a48d-5b2c-6eea-5796-0ef1007aacb8" [ 3191.054161] env[61964]: _type = "Task" [ 3191.054161] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3191.060920] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5203a48d-5b2c-6eea-5796-0ef1007aacb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3191.125400] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688822, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3191.563934] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5203a48d-5b2c-6eea-5796-0ef1007aacb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3191.623505] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688822, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3192.064807] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5203a48d-5b2c-6eea-5796-0ef1007aacb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3192.125204] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688822, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3192.570031] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5203a48d-5b2c-6eea-5796-0ef1007aacb8, 'name': SearchDatastore_Task, 'duration_secs': 1.231103} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3192.570445] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/73eb3d41-0b31-4e9c-8df6-ade235fce138/ts-2024-11-29-16-09-11 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3192.570825] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31e47c74-3c74-4407-8be6-755125376857 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3192.584028] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/73eb3d41-0b31-4e9c-8df6-ade235fce138/ts-2024-11-29-16-09-11 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3192.586045] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 73eb3d41-0b31-4e9c-8df6-ade235fce138 is no longer used by this node. Pending deletion! 
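[annotation] The image-cache pass above (acquire the per-entry lock, run SearchDatastore_Task, then either leave a ts-* marker with "Pending deletion!" or delete the directory with "Deleting!") is a two-phase aging pattern. The Python sketch below is a minimal, hypothetical illustration of that pattern only; the ds_browser object and its search/mkdir/delete methods are assumptions for the sake of the example, not the real nova.virt.vmwareapi.imagecache API.

    from contextlib import contextmanager
    from datetime import datetime, timezone
    import threading

    _locks: dict[str, threading.Lock] = {}

    @contextmanager
    def image_cache_lock(path: str):
        # Stand-in for the per-entry lock the log shows being acquired and released.
        with _locks.setdefault(path, threading.Lock()):
            yield

    def age_cached_image(ds_browser, cache_root: str, image_id: str, used_images: set):
        # Two-phase aging: the first pass leaves a ts-* marker ("Pending deletion!");
        # a later pass deletes the entry ("Deleting!"). The real cache manager also
        # checks how old the marker is before deleting.
        path = f"{cache_root}/{image_id}"
        with image_cache_lock(path):
            if image_id in used_images:
                return  # still referenced by an instance on this node
            entries = ds_browser.search(path)            # cf. SearchDatastore_Task
            if not any(name.startswith("ts-") for name in entries):
                stamp = datetime.now(timezone.utc).strftime("ts-%Y-%m-%d-%H-%M-%S")
                ds_browser.mkdir(f"{path}/{stamp}")      # cf. FileManager.MakeDirectory
            else:
                ds_browser.delete(path)                  # cf. FileManager.DeleteDatastoreFile_Task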
[ 3192.586045] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/73eb3d41-0b31-4e9c-8df6-ade235fce138" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3192.586045] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3192.586045] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3192.586045] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3192.586045] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-988cfcd2-bb5a-4dd3-bd31-47da208a135d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3192.591393] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3192.591393] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c660df-b9ec-f373-d91b-7982c2f75cf1" [ 3192.591393] env[61964]: _type = "Task" [ 3192.591393] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3192.602535] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c660df-b9ec-f373-d91b-7982c2f75cf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3192.628104] env[61964]: DEBUG oslo_vmware.api [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Task: {'id': task-1688822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.713975} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3192.628494] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3192.628823] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3192.629166] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3192.629488] env[61964]: INFO nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Took 3.93 seconds to destroy the instance on the hypervisor. [ 3192.633175] env[61964]: DEBUG nova.compute.claims [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3192.633459] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3192.633803] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3192.765791] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8816bf05-c924-41ad-a278-97c042a737b6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3192.773631] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0642c695-b6f9-4271-9417-b5f9742a2ddf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3192.803660] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c572af1-6dfa-4a8f-ad8b-ac0c77d23826 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3192.811233] env[61964]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ada0599-6e2b-4b53-9405-c947c251b9f2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3192.824266] env[61964]: DEBUG nova.compute.provider_tree [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed in ProviderTree for provider: c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3192.832824] env[61964]: DEBUG nova.scheduler.client.report [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Inventory has not changed for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3192.849282] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.215s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3192.849823] env[61964]: ERROR nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3192.849823] env[61964]: Faults: ['InvalidArgument'] [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Traceback (most recent call last): [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self.driver.spawn(context, instance, image_meta, [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self._fetch_image_if_missing(context, vi) [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3192.849823] 
env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] image_cache(vi, tmp_image_ds_loc) [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] vm_util.copy_virtual_disk( [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] session._wait_for_task(vmdk_copy_task) [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] return self.wait_for_task(task_ref) [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] return evt.wait() [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] result = hub.switch() [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] return self.greenlet.switch() [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] self.f(*self.args, **self.kw) [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] raise exceptions.translate_fault(task_info.error) [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Faults: ['InvalidArgument'] [ 3192.849823] env[61964]: ERROR nova.compute.manager [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] [ 3192.850728] env[61964]: DEBUG nova.compute.utils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] VimFaultException {{(pid=61964) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 3192.851979] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Build of instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e was re-scheduled: A specified parameter was not correct: fileType [ 3192.851979] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 3192.852382] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 3192.852560] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 3192.852734] env[61964]: DEBUG nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3192.852897] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3193.102393] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c660df-b9ec-f373-d91b-7982c2f75cf1, 'name': SearchDatastore_Task, 'duration_secs': 0.009192} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3193.102757] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3193.102982] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3193.103118] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3193.103437] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3193.103695] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17ca8c9a-f0f5-40bc-8c54-60efd43f13f1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3193.107856] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3193.107856] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5251ba93-5f9c-3671-aa6d-09723d603c82" [ 3193.107856] env[61964]: _type = "Task" [ 3193.107856] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3193.115332] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5251ba93-5f9c-3671-aa6d-09723d603c82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3193.147064] env[61964]: DEBUG nova.network.neutron [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3193.161312] env[61964]: INFO nova.compute.manager [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Took 0.31 seconds to deallocate network for instance. 
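[annotation] For the failed build above the visible sequence is: spawn() raises VimFaultException, the resource claim is aborted under the "compute_resources" lock, the network is deallocated, and the instance's allocations are deleted so the build can be rescheduled. The sketch below is a simplified stand-in for that flow, not nova.compute.manager itself; every helper object and method name in it is hypothetical.

    import threading

    class RescheduleRequired(Exception):
        def __init__(self, instance, cause):
            super().__init__(f"rescheduling {instance}: {cause}")
            self.cause = cause

    compute_resources_lock = threading.Lock()  # the "compute_resources" lock in the log

    def build_and_run(instance, driver, resource_tracker, network_api, placement_report):
        claim = resource_tracker.claim(instance)             # resources reserved up front
        try:
            driver.spawn(instance)                           # raises VimFaultException above
        except Exception as exc:
            with compute_resources_lock:                     # "Aborting claim"
                resource_tracker.abort_instance_claim(claim)
            network_api.deallocate_for_instance(instance)    # "Deallocating network for instance"
            placement_report.delete_allocations(instance)    # "Deleted allocations for instance"
            raise RescheduleRequired(instance, exc) from exc # the build is retried elsewhere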
[ 3193.254214] env[61964]: INFO nova.scheduler.client.report [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Deleted allocations for instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e [ 3193.276113] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c9eed79a-2d8c-4389-a889-2f899815941f tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 535.306s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3193.276466] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 339.422s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3193.276631] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Acquiring lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3193.277147] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3193.277147] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3193.279238] env[61964]: INFO nova.compute.manager [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Terminating instance [ 3193.281027] env[61964]: DEBUG nova.compute.manager [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Start destroying the instance on the hypervisor. 
{{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3193.281282] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3193.281781] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75fcc050-25ac-4f6c-a39e-07962edffd31 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3193.290840] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841a040e-0b6f-478e-a4f7-fc23749947f9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3193.316727] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e717f146-dd0a-4285-810f-8f9cc7ffaa6e could not be found. [ 3193.316915] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3193.317115] env[61964]: INFO nova.compute.manager [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3193.317361] env[61964]: DEBUG oslo.service.loopingcall [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3193.317820] env[61964]: DEBUG nova.compute.manager [-] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3193.317920] env[61964]: DEBUG nova.network.neutron [-] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3193.347159] env[61964]: DEBUG nova.network.neutron [-] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3193.354777] env[61964]: INFO nova.compute.manager [-] [instance: e717f146-dd0a-4285-810f-8f9cc7ffaa6e] Took 0.04 seconds to deallocate network for instance. 
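The destroy sequence above is deliberately tolerant of a VM that is already gone: the vmwareapi driver raises InstanceNotFound, vmops logs a warning, and the compute manager carries on as though the destroy succeeded so the delete can finish and the network still gets deallocated. A simplified sketch of that idempotent-teardown pattern follows; the class and function names are illustrative, not Nova's actual interfaces.

    class InstanceNotFound(Exception):
        """Stand-in for the exception a driver raises when the VM no longer exists."""

    def destroy_instance(driver, instance_uuid):
        # A missing backend VM is logged and treated as a successful destroy,
        # matching the WARNING followed by "Instance destroyed" in the log.
        try:
            driver.destroy(instance_uuid)
        except InstanceNotFound:
            print('WARNING: instance %s does not exist on backend; '
                  'continuing with delete' % instance_uuid)
        print('Instance destroyed')

    class MissingVmDriver:
        def destroy(self, instance_uuid):
            raise InstanceNotFound(instance_uuid)

    destroy_instance(MissingVmDriver(), 'e717f146-dd0a-4285-810f-8f9cc7ffaa6e')

Treating "not found" as success keeps delete retries idempotent: a second terminate attempt on a half-cleaned instance converges to the same end state instead of failing.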
[ 3193.435012] env[61964]: DEBUG oslo_concurrency.lockutils [None req-c19d4967-eae7-4893-b024-293b59c4b891 tempest-DeleteServersTestJSON-1027161782 tempest-DeleteServersTestJSON-1027161782-project-member] Lock "e717f146-dd0a-4285-810f-8f9cc7ffaa6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.158s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3193.619422] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5251ba93-5f9c-3671-aa6d-09723d603c82, 'name': SearchDatastore_Task, 'duration_secs': 0.01003} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3193.619733] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5 is no longer used. Deleting! [ 3193.619914] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3193.620219] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c2dd8d8-c2e9-485f-8739-19b10e53a199 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3193.626527] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3193.626527] env[61964]: value = "task-1688823" [ 3193.626527] env[61964]: _type = "Task" [ 3193.626527] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3193.634462] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688823, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3194.136158] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118731} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3194.136518] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3194.136518] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3194.136742] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ee5602cc-fe83-4f7b-99dc-3c95a048ced1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3194.136859] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/ee5602cc-fe83-4f7b-99dc-3c95a048ced1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3194.137209] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ee5602cc-fe83-4f7b-99dc-3c95a048ced1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3194.137472] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a9312f0-d4b3-4fe8-bf9f-79288b4150cf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3194.141745] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3194.141745] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5211d01d-84b8-5653-8379-3570bdc1f2e4" [ 3194.141745] env[61964]: _type = "Task" [ 3194.141745] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3194.148970] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5211d01d-84b8-5653-8379-3570bdc1f2e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3194.651700] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5211d01d-84b8-5653-8379-3570bdc1f2e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010495} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3194.651996] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/ee5602cc-fe83-4f7b-99dc-3c95a048ced1 is no longer used. Deleting! 
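From here on the log is the image-cache cleanup pass repeating one cycle per cached image: take a lock named after the datastore path, run a SearchDatastore_Task to decide whether the image is still referenced, delete the folder if it is not, release the lock, and move to the next entry. A minimal sketch of that per-item locking pattern using oslo.concurrency follows; the is_still_used and delete_folder callables are illustrative placeholders rather than Nova's ds_util helpers.

    from oslo_concurrency import lockutils

    def purge_unused_images(cached_image_paths, is_still_used, delete_folder):
        # cached_image_paths: iterable of datastore paths such as
        #   "[datastore1] devstack-image-cache_base/<image-uuid>"
        # is_still_used / delete_folder: caller-supplied callables standing in
        # for the SearchDatastore_Task check and the DeleteDatastoreFile_Task call.
        for path in cached_image_paths:
            with lockutils.lock(path):
                if is_still_used(path):
                    continue
                print('Image %s is no longer used. Deleting!' % path)
                delete_folder(path)

    purge_unused_images(
        ['[datastore1] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5'],
        is_still_used=lambda path: False,
        delete_folder=lambda path: None)

Scoping the lock to a single cache entry means the periodic cleanup never blocks work on unrelated images.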
[ 3194.652191] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/ee5602cc-fe83-4f7b-99dc-3c95a048ced1 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3194.652449] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a645140-6c39-4126-95cc-abeca8bec192 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3194.659416] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3194.659416] env[61964]: value = "task-1688824" [ 3194.659416] env[61964]: _type = "Task" [ 3194.659416] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3194.666655] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688824, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3195.169117] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114374} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3195.169470] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3195.169532] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/ee5602cc-fe83-4f7b-99dc-3c95a048ced1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3195.169746] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6fda1ca2-6c34-416b-9ca5-6060b59fdd9a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3195.169864] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/6fda1ca2-6c34-416b-9ca5-6060b59fdd9a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3195.170193] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6fda1ca2-6c34-416b-9ca5-6060b59fdd9a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3195.170459] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c452a671-917d-40d6-93e7-b4fa0587ea21 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3195.174729] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3195.174729] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5253d79c-9a9f-5946-6f3b-8d63f8634376" [ 3195.174729] env[61964]: _type = "Task" [ 3195.174729] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3195.182472] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5253d79c-9a9f-5946-6f3b-8d63f8634376, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3195.684914] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5253d79c-9a9f-5946-6f3b-8d63f8634376, 'name': SearchDatastore_Task, 'duration_secs': 0.009782} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3195.685232] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/6fda1ca2-6c34-416b-9ca5-6060b59fdd9a is no longer used. Deleting! [ 3195.685380] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6fda1ca2-6c34-416b-9ca5-6060b59fdd9a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3195.685641] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ce5333a-afc3-4f04-b899-c32620a06ebb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3195.691588] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3195.691588] env[61964]: value = "task-1688825" [ 3195.691588] env[61964]: _type = "Task" [ 3195.691588] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3195.700970] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688825, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3196.201389] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688825, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132908} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3196.201670] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3196.201770] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/6fda1ca2-6c34-416b-9ca5-6060b59fdd9a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3196.201982] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3196.202116] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3196.202433] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3196.202695] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-735a70d3-8db8-419e-9ba2-f11ece5dc9ed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3196.206743] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3196.206743] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520154f5-15f1-86a5-2903-6b53cabbbd73" [ 3196.206743] env[61964]: _type = "Task" [ 3196.206743] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3196.213669] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520154f5-15f1-86a5-2903-6b53cabbbd73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3196.717150] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520154f5-15f1-86a5-2903-6b53cabbbd73, 'name': SearchDatastore_Task, 'duration_secs': 0.008631} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3196.717428] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da/ts-2024-11-29-16-09-15 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3196.717671] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c0a399a-2f59-4058-a0f4-3f87af8233ae {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3196.729517] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da/ts-2024-11-29-16-09-15 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3196.729517] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image e1562371-25c6-4e77-a0d1-ca454820a5da is no longer used by this node. Pending deletion! [ 3196.729670] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3196.729936] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3196.729936] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3196.730203] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3196.730432] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5465e656-61f1-464c-9f95-3325d720a9af {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3196.734218] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3196.734218] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521911c6-fde2-a188-2e7a-71d7b1dcb281" [ 3196.734218] env[61964]: _type = "Task" [ 3196.734218] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3196.741019] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521911c6-fde2-a188-2e7a-71d7b1dcb281, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3197.245432] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521911c6-fde2-a188-2e7a-71d7b1dcb281, 'name': SearchDatastore_Task, 'duration_secs': 0.008176} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3197.245752] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0 is no longer used. Deleting! [ 3197.245859] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3197.246131] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e6ba847-6bb7-46f5-ae4d-74d3b8373053 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3197.251923] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3197.251923] env[61964]: value = "task-1688826" [ 3197.251923] env[61964]: _type = "Task" [ 3197.251923] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3197.258960] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688826, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3197.762172] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101792} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3197.762443] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3197.762523] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/4732143d-796a-4a66-9f1e-806f8b0654e0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3197.762742] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2c630f01-c07f-44c2-94ff-0d238e09b8be" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3197.762877] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/2c630f01-c07f-44c2-94ff-0d238e09b8be" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3197.763239] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c630f01-c07f-44c2-94ff-0d238e09b8be" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3197.763510] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8543aa3-8ba2-4096-9f62-8970aafd9f2d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3197.767851] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3197.767851] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f153a7-7972-af14-00a2-7256b02d14dd" [ 3197.767851] env[61964]: _type = "Task" [ 3197.767851] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3197.775033] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f153a7-7972-af14-00a2-7256b02d14dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3198.278567] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f153a7-7972-af14-00a2-7256b02d14dd, 'name': SearchDatastore_Task, 'duration_secs': 0.009282} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3198.278867] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/2c630f01-c07f-44c2-94ff-0d238e09b8be is no longer used. Deleting! 
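Some entries (e1562371-… a little earlier, and others below) are not deleted outright; instead a ts-<timestamp> marker directory is created inside the cached-image folder and the image is logged as "Pending deletion!". The marker records when this node last found the image unused, so a later pass can remove it only after it has aged past a threshold. A simplified local-filesystem sketch of that mark-then-age idea follows; the helper names and the 24-hour threshold are assumptions for illustration, not Nova's configuration.

    import os
    import tempfile
    import time

    TS_PREFIX = 'ts-'
    AGE_SECONDS = 24 * 3600  # illustrative aging threshold

    def mark_unused(image_dir):
        # First pass: drop a timestamp marker instead of deleting immediately.
        marker = os.path.join(image_dir,
                              TS_PREFIX + time.strftime('%Y-%m-%d-%H-%M-%S'))
        os.makedirs(marker, exist_ok=True)
        print('Image %s is no longer used by this node. Pending deletion!' % image_dir)

    def purge_if_aged(image_dir):
        # Later pass: report the image as removable only once its oldest
        # marker is older than the threshold.
        markers = [m for m in os.listdir(image_dir) if m.startswith(TS_PREFIX)]
        if not markers:
            return False
        oldest = min(os.path.getmtime(os.path.join(image_dir, m)) for m in markers)
        return time.time() - oldest >= AGE_SECONDS

    demo_dir = tempfile.mkdtemp()
    mark_unused(demo_dir)
    print(purge_if_aged(demo_dir))   # False: the marker was just created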
[ 3198.278907] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/2c630f01-c07f-44c2-94ff-0d238e09b8be {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3198.279190] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-feb412ab-5b31-4e5b-b4f3-a4a2042d0e1d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3198.285944] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3198.285944] env[61964]: value = "task-1688827" [ 3198.285944] env[61964]: _type = "Task" [ 3198.285944] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3198.293549] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3198.796063] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103971} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3198.796322] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3198.796467] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/2c630f01-c07f-44c2-94ff-0d238e09b8be" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3198.796683] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2594191b-2e14-499f-9d56-552c6ccbeda7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3198.796800] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/2594191b-2e14-499f-9d56-552c6ccbeda7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3198.797142] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2594191b-2e14-499f-9d56-552c6ccbeda7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3198.797412] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f0d5d19-01b0-4ea6-8934-19b91caf9e87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3198.801611] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3198.801611] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529357a9-cc14-1e0d-4322-3081ae46b081" [ 3198.801611] env[61964]: _type = "Task" [ 3198.801611] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3198.808755] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529357a9-cc14-1e0d-4322-3081ae46b081, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3199.311746] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529357a9-cc14-1e0d-4322-3081ae46b081, 'name': SearchDatastore_Task, 'duration_secs': 0.009692} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3199.312100] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/2594191b-2e14-499f-9d56-552c6ccbeda7 is no longer used. Deleting! [ 3199.312232] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/2594191b-2e14-499f-9d56-552c6ccbeda7 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3199.312454] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9635338b-466e-4b0e-8939-75d1df28803f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3199.318372] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3199.318372] env[61964]: value = "task-1688828" [ 3199.318372] env[61964]: _type = "Task" [ 3199.318372] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3199.325626] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688828, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3199.828200] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115637} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3199.828439] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3199.828638] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/2594191b-2e14-499f-9d56-552c6ccbeda7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3199.828862] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3199.828991] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3199.829325] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3199.829589] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ce7777e-fea5-427d-8af3-901cdadcd1c6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3199.833740] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3199.833740] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b4a50d-1eda-d002-0d8c-289ddda28c91" [ 3199.833740] env[61964]: _type = "Task" [ 3199.833740] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3199.841545] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b4a50d-1eda-d002-0d8c-289ddda28c91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3200.344245] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b4a50d-1eda-d002-0d8c-289ddda28c91, 'name': SearchDatastore_Task, 'duration_secs': 0.008423} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3200.344544] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327/ts-2024-11-29-16-09-19 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3200.344782] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adf8f5f6-f1d6-4d46-9de8-8842cdfc102c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3200.356053] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327/ts-2024-11-29-16-09-19 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3200.356201] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image f69db335-1cd0-4f6e-a5d1-8d90cfa58327 is no longer used by this node. Pending deletion! [ 3200.356365] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3200.356571] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d8b6d25a-ae6f-4b86-aa1d-a92f9f1e5d2a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3200.356686] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/d8b6d25a-ae6f-4b86-aa1d-a92f9f1e5d2a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3200.356989] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d8b6d25a-ae6f-4b86-aa1d-a92f9f1e5d2a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3200.357249] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-689cf55a-4cf5-4eff-9456-6e49984c96e3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3200.361370] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3200.361370] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524b05e2-ae5a-77cf-2afb-e27dd398c294" [ 3200.361370] env[61964]: _type = "Task" [ 3200.361370] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3200.368478] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524b05e2-ae5a-77cf-2afb-e27dd398c294, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3200.872524] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524b05e2-ae5a-77cf-2afb-e27dd398c294, 'name': SearchDatastore_Task, 'duration_secs': 0.008644} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3200.872828] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/d8b6d25a-ae6f-4b86-aa1d-a92f9f1e5d2a is no longer used. Deleting! [ 3200.872973] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d8b6d25a-ae6f-4b86-aa1d-a92f9f1e5d2a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3200.873249] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-416b1511-77a1-48a9-862e-0b764e7875eb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3200.878902] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3200.878902] env[61964]: value = "task-1688829" [ 3200.878902] env[61964]: _type = "Task" [ 3200.878902] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3200.886087] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3201.388728] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124057} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3201.389089] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3201.389191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/d8b6d25a-ae6f-4b86-aa1d-a92f9f1e5d2a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3201.389367] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3201.389503] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3201.389808] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3201.390092] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37fe19f4-8f43-4608-b748-853ba7d445ce {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3201.394318] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3201.394318] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5210cd65-be95-a56a-f11c-96db9bec91da" [ 3201.394318] env[61964]: _type = "Task" [ 3201.394318] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3201.401723] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5210cd65-be95-a56a-f11c-96db9bec91da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3201.904927] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5210cd65-be95-a56a-f11c-96db9bec91da, 'name': SearchDatastore_Task, 'duration_secs': 0.010258} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3201.905185] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600 is no longer used. Deleting! 
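Each "Deleting the datastore file" record maps to a FileManager.DeleteDatastoreFile_Task invocation followed by the same task polling. A hedged sketch of driving that call directly through oslo.vmware is below; the vCenter host, credentials, and datacenter reference are placeholders, a real run needs a reachable vCenter, and this only approximates what Nova's ds_util does rather than reproducing its code.

    from oslo_vmware import api as vmware_api

    def delete_datastore_file(session, ds_path, datacenter_ref):
        # ds_path looks like "[datastore1] devstack-image-cache_base/<image-uuid>".
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter_ref)
        session.wait_for_task(task)  # same polling loop seen in the log records

    # Placeholder connection values; uncomment only against a reachable vCenter.
    # session = vmware_api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
    #                                       api_retry_count=3, task_poll_interval=1.0)
    # delete_datastore_file(session,
    #                       '[datastore1] devstack-image-cache_base/<image-uuid>',
    #                       datacenter_ref)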
[ 3201.905372] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3201.905632] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb3b9076-e643-495a-aa38-1f3586040ca7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3201.911420] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3201.911420] env[61964]: value = "task-1688830" [ 3201.911420] env[61964]: _type = "Task" [ 3201.911420] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3201.918472] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688830, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3202.421189] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688830, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106119} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3202.421572] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3202.421572] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3202.421796] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6333a232-3c21-4d65-8158-6f45908b0379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3202.421912] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/6333a232-3c21-4d65-8158-6f45908b0379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3202.422239] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6333a232-3c21-4d65-8158-6f45908b0379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3202.422514] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35cf2ebc-a711-4229-ad86-3459129a091f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3202.427068] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3202.427068] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d45ed5-cca9-4cd1-5437-7d93cdc7e973" [ 3202.427068] env[61964]: _type = "Task" [ 3202.427068] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3202.434500] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d45ed5-cca9-4cd1-5437-7d93cdc7e973, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3202.938122] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d45ed5-cca9-4cd1-5437-7d93cdc7e973, 'name': SearchDatastore_Task, 'duration_secs': 0.022449} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3202.938438] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/6333a232-3c21-4d65-8158-6f45908b0379 is no longer used. Deleting! [ 3202.938583] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6333a232-3c21-4d65-8158-6f45908b0379 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3202.938850] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d81a5846-dc5e-484f-aa63-926657ebdc50 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3202.945301] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3202.945301] env[61964]: value = "task-1688831" [ 3202.945301] env[61964]: _type = "Task" [ 3202.945301] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3202.952824] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688831, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3203.455068] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688831, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120276} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3203.455311] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3203.455467] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/6333a232-3c21-4d65-8158-6f45908b0379" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3203.455692] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3203.455813] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3203.456145] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3203.456414] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c21ac93a-d5f9-4587-9399-6c75e3a3e238 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3203.460644] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3203.460644] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524fc026-4058-93f8-9923-1ca82b946868" [ 3203.460644] env[61964]: _type = "Task" [ 3203.460644] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3203.468307] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524fc026-4058-93f8-9923-1ca82b946868, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3203.971733] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524fc026-4058-93f8-9923-1ca82b946868, 'name': SearchDatastore_Task, 'duration_secs': 0.008857} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3203.971991] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0/ts-2024-11-29-16-09-22 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3203.972263] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05114e6a-b311-4acb-9e68-2f1badfd2342 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3203.983801] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0/ts-2024-11-29-16-09-22 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3203.983948] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 5a058243-977f-4193-925a-cbbcbe85aaf0 is no longer used by this node. Pending deletion! [ 3203.984109] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3203.984320] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3203.984439] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3203.984742] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3203.984970] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c39aaadd-d532-4338-a98e-36dcfd20ec60 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3203.988969] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3203.988969] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527edaa9-c3d7-773a-4581-126531c372b8" [ 3203.988969] env[61964]: _type = "Task" [ 3203.988969] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3203.996143] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527edaa9-c3d7-773a-4581-126531c372b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3204.499730] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527edaa9-c3d7-773a-4581-126531c372b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009702} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3204.500109] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3204.500286] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3204.500409] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3204.500758] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3204.501038] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b2df7cc-1f36-46c2-81be-bdd28bf1b297 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3204.505512] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3204.505512] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522a9022-2a43-ff55-ba36-6ade3bf4c77a" [ 3204.505512] env[61964]: _type = "Task" [ 3204.505512] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3204.512854] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522a9022-2a43-ff55-ba36-6ade3bf4c77a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3205.015521] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522a9022-2a43-ff55-ba36-6ade3bf4c77a, 'name': SearchDatastore_Task, 'duration_secs': 0.008778} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3205.015803] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e/ts-2024-11-29-16-09-23 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3205.016092] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93b4d6cd-6a35-4588-a0b2-7c0ae469a697 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3205.026931] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e/ts-2024-11-29-16-09-23 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3205.027082] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image ab2e6442-d7e9-4ecb-b882-d6f422152b5e is no longer used by this node. Pending deletion! [ 3205.027246] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3205.027454] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3205.027572] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3205.027879] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3205.028151] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8154d248-8fa1-4dd4-9d84-83f3f1aa0048 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3205.032212] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3205.032212] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d3de52-07f3-524f-b08a-ec7f0e76a006" [ 3205.032212] env[61964]: _type = "Task" [ 3205.032212] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3205.039061] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d3de52-07f3-524f-b08a-ec7f0e76a006, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3205.543051] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d3de52-07f3-524f-b08a-ec7f0e76a006, 'name': SearchDatastore_Task, 'duration_secs': 0.007416} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3205.543399] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316/ts-2024-11-29-16-09-24 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3205.543399] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e49229f-aaa6-4381-ab4c-45f02f3ccb05 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3205.555196] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316/ts-2024-11-29-16-09-24 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3205.555323] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 2a9f7ead-9398-4d08-b589-bb77db498316 is no longer used by this node. Pending deletion! [ 3205.555487] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3205.555692] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/0e63e422-eeb9-4ba0-b58d-a5c6729ac4a6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3205.555808] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/0e63e422-eeb9-4ba0-b58d-a5c6729ac4a6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3205.556124] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/0e63e422-eeb9-4ba0-b58d-a5c6729ac4a6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3205.556359] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a0ec57c-dfa9-40ae-8347-5530e245e69c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3205.560392] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3205.560392] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52158141-f66b-0b49-6128-9495f5a6d58c" [ 3205.560392] env[61964]: _type = "Task" [ 3205.560392] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3205.567692] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52158141-f66b-0b49-6128-9495f5a6d58c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3206.071558] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52158141-f66b-0b49-6128-9495f5a6d58c, 'name': SearchDatastore_Task, 'duration_secs': 0.008403} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3206.071827] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/0e63e422-eeb9-4ba0-b58d-a5c6729ac4a6 is no longer used. Deleting! [ 3206.071971] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/0e63e422-eeb9-4ba0-b58d-a5c6729ac4a6 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3206.072358] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25be8e9e-5961-4e67-88f7-24ed5deabdbc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3206.078404] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3206.078404] env[61964]: value = "task-1688832" [ 3206.078404] env[61964]: _type = "Task" [ 3206.078404] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3206.086151] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688832, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3206.588819] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113181} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3206.589186] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3206.589253] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/0e63e422-eeb9-4ba0-b58d-a5c6729ac4a6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3206.589475] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3206.589594] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3206.589902] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3206.590184] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85a2508d-5839-4380-a7f3-80d7f45415e8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3206.594492] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3206.594492] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e04bfb-54fb-5099-c039-55ca27a0186d" [ 3206.594492] env[61964]: _type = "Task" [ 3206.594492] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3206.602930] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e04bfb-54fb-5099-c039-55ca27a0186d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3207.104789] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e04bfb-54fb-5099-c039-55ca27a0186d, 'name': SearchDatastore_Task, 'duration_secs': 0.009531} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3207.105144] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754 is no longer used. Deleting! 
[ 3207.105317] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3207.105590] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e53fc04-b551-4342-a5af-eeb642d5fc47 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3207.112510] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3207.112510] env[61964]: value = "task-1688833" [ 3207.112510] env[61964]: _type = "Task" [ 3207.112510] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3207.119876] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688833, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3207.622479] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110804} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3207.622846] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3207.622846] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3207.623073] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/68d0f625-58c2-45b8-9d5e-1d4b2260ca27" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3207.623195] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/68d0f625-58c2-45b8-9d5e-1d4b2260ca27" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3207.623514] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/68d0f625-58c2-45b8-9d5e-1d4b2260ca27" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3207.623772] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38c345d5-50c1-4585-aeb4-48823ab13042 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3207.627954] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3207.627954] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525429c4-3acb-05bc-b3a8-3a4ba2b15a78" [ 3207.627954] env[61964]: _type = "Task" [ 3207.627954] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3207.636214] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525429c4-3acb-05bc-b3a8-3a4ba2b15a78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3208.138508] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525429c4-3acb-05bc-b3a8-3a4ba2b15a78, 'name': SearchDatastore_Task, 'duration_secs': 0.009376} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3208.138810] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/68d0f625-58c2-45b8-9d5e-1d4b2260ca27 is no longer used. Deleting! [ 3208.138955] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/68d0f625-58c2-45b8-9d5e-1d4b2260ca27 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3208.139225] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30fea038-8e06-406c-89b7-cdf88be35be1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3208.145557] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3208.145557] env[61964]: value = "task-1688834" [ 3208.145557] env[61964]: _type = "Task" [ 3208.145557] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3208.153189] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688834, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3208.655668] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10278} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3208.655973] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3208.656031] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/68d0f625-58c2-45b8-9d5e-1d4b2260ca27" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3208.656234] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/01c170c5-473f-4f73-a141-89448c587a8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3208.656355] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/01c170c5-473f-4f73-a141-89448c587a8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3208.656655] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/01c170c5-473f-4f73-a141-89448c587a8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3208.656903] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f9735e4-4357-4aff-9524-9b48ec4260ec {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3208.660925] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3208.660925] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b5632b-df64-f02e-769c-e2d74e73584b" [ 3208.660925] env[61964]: _type = "Task" [ 3208.660925] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3208.668440] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b5632b-df64-f02e-769c-e2d74e73584b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3209.171562] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b5632b-df64-f02e-769c-e2d74e73584b, 'name': SearchDatastore_Task, 'duration_secs': 0.008741} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3209.171839] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/01c170c5-473f-4f73-a141-89448c587a8a/ts-2024-11-29-16-09-28 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3209.172113] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18dc2d53-f379-4cb8-9587-78a1b50571ed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3209.184407] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/01c170c5-473f-4f73-a141-89448c587a8a/ts-2024-11-29-16-09-28 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3209.184563] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 01c170c5-473f-4f73-a141-89448c587a8a is no longer used by this node. Pending deletion! [ 3209.184723] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/01c170c5-473f-4f73-a141-89448c587a8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3209.184928] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7d29403c-0bcb-468e-8725-018fefeb81cf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3209.185060] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/7d29403c-0bcb-468e-8725-018fefeb81cf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3209.185363] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7d29403c-0bcb-468e-8725-018fefeb81cf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3209.185607] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08429757-15b4-4c13-bb6a-13f5bf81ecfb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3209.189659] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3209.189659] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52139bf9-8afa-0035-3bbc-003d427fcde0" [ 3209.189659] env[61964]: _type = "Task" [ 3209.189659] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3209.196980] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52139bf9-8afa-0035-3bbc-003d427fcde0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3209.700229] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52139bf9-8afa-0035-3bbc-003d427fcde0, 'name': SearchDatastore_Task, 'duration_secs': 0.008248} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3209.700544] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/7d29403c-0bcb-468e-8725-018fefeb81cf/ts-2024-11-29-16-09-28 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3209.700660] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c952b89-e9f1-4039-b2db-476f4666a3b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3209.712261] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/7d29403c-0bcb-468e-8725-018fefeb81cf/ts-2024-11-29-16-09-28 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3209.712407] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 7d29403c-0bcb-468e-8725-018fefeb81cf is no longer used by this node. Pending deletion! [ 3209.712565] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/7d29403c-0bcb-468e-8725-018fefeb81cf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3209.712802] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/adc837ba-6b24-4b82-aea4-c119ca7c31b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3209.712894] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/adc837ba-6b24-4b82-aea4-c119ca7c31b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3209.713240] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/adc837ba-6b24-4b82-aea4-c119ca7c31b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3209.713486] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db1143ce-0745-4711-9462-36218f16b3cd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3209.717704] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3209.717704] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524f79f3-b176-c36b-4ad4-73d634913e76" [ 3209.717704] env[61964]: _type = "Task" [ 3209.717704] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3209.725625] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524f79f3-b176-c36b-4ad4-73d634913e76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3210.228881] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524f79f3-b176-c36b-4ad4-73d634913e76, 'name': SearchDatastore_Task, 'duration_secs': 0.009158} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3210.229220] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/adc837ba-6b24-4b82-aea4-c119ca7c31b2 is no longer used. Deleting! [ 3210.229364] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/adc837ba-6b24-4b82-aea4-c119ca7c31b2 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3210.229624] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b69759f-1c3c-4833-b407-863687a24600 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3210.235824] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3210.235824] env[61964]: value = "task-1688835" [ 3210.235824] env[61964]: _type = "Task" [ 3210.235824] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3210.242922] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688835, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3210.745365] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688835, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167966} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3210.745735] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3210.745777] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/adc837ba-6b24-4b82-aea4-c119ca7c31b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3210.745979] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/205c5c90-12aa-415b-9e62-3796592bc406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3210.746117] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/205c5c90-12aa-415b-9e62-3796592bc406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3210.746555] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/205c5c90-12aa-415b-9e62-3796592bc406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3210.746825] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6df0c0b-04b5-4b63-93db-9e5cb4ea53f2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3210.750969] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3210.750969] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ba207c-b589-46c7-28f1-9f6995fe5c15" [ 3210.750969] env[61964]: _type = "Task" [ 3210.750969] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3210.758274] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ba207c-b589-46c7-28f1-9f6995fe5c15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3211.260631] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ba207c-b589-46c7-28f1-9f6995fe5c15, 'name': SearchDatastore_Task, 'duration_secs': 0.0098} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3211.260888] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/205c5c90-12aa-415b-9e62-3796592bc406/ts-2024-11-29-16-09-30 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3211.261156] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6a546c0-01a1-488b-8b34-2137188afd65 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3211.285179] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/205c5c90-12aa-415b-9e62-3796592bc406/ts-2024-11-29-16-09-30 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3211.285281] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 205c5c90-12aa-415b-9e62-3796592bc406 is no longer used by this node. Pending deletion! [ 3211.285419] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/205c5c90-12aa-415b-9e62-3796592bc406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3211.285633] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3211.285749] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3211.287028] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3211.287028] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-024bef7e-2199-4edd-aebd-316ed07c74f0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3211.290398] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3211.290398] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ca45b3-abc2-e017-2f6d-74a7049d04ac" [ 3211.290398] env[61964]: _type = "Task" [ 3211.290398] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3211.298032] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ca45b3-abc2-e017-2f6d-74a7049d04ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3211.800322] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ca45b3-abc2-e017-2f6d-74a7049d04ac, 'name': SearchDatastore_Task, 'duration_secs': 0.016365} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3211.800717] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a/ts-2024-11-29-16-09-30 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3211.800839] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34d30005-42f0-48a6-abd5-88ea09f47218 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3211.818288] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a/ts-2024-11-29-16-09-30 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3211.818456] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 28f384a9-aef1-475a-b052-bba2a8e08a4a is no longer used by this node. Pending deletion! [ 3211.818649] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3211.818896] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/901ec13c-04f7-4571-86e2-3db81a5e182e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3211.819059] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/901ec13c-04f7-4571-86e2-3db81a5e182e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3211.819396] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/901ec13c-04f7-4571-86e2-3db81a5e182e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3211.819662] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5877d31-d552-4b58-b30a-884a2bb0596f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3211.823957] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3211.823957] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52570b3e-f191-818f-96bb-ad5f763720ef" [ 3211.823957] env[61964]: _type = "Task" [ 3211.823957] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3211.831812] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52570b3e-f191-818f-96bb-ad5f763720ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3212.335471] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52570b3e-f191-818f-96bb-ad5f763720ef, 'name': SearchDatastore_Task, 'duration_secs': 0.008467} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3212.335777] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/901ec13c-04f7-4571-86e2-3db81a5e182e is no longer used. Deleting! [ 3212.335921] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/901ec13c-04f7-4571-86e2-3db81a5e182e {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3212.336196] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48837311-0926-4bf4-9afd-93b52d2e1951 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3212.342021] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3212.342021] env[61964]: value = "task-1688836" [ 3212.342021] env[61964]: _type = "Task" [ 3212.342021] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3212.348887] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3212.852075] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277918} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3212.852376] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3212.852507] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/901ec13c-04f7-4571-86e2-3db81a5e182e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3212.852725] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3212.852882] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3212.853234] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3212.853502] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2c8a706-e4bf-4eac-98be-832fb903d144 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3212.857669] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3212.857669] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52453bee-97ed-c7c3-9f01-5e0692941055" [ 3212.857669] env[61964]: _type = "Task" [ 3212.857669] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3212.864858] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52453bee-97ed-c7c3-9f01-5e0692941055, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3213.368124] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52453bee-97ed-c7c3-9f01-5e0692941055, 'name': SearchDatastore_Task, 'duration_secs': 0.008774} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3213.368418] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89/ts-2024-11-29-16-09-32 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3213.368695] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d0389de-1419-4652-9eb8-231103f06cd1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3213.380242] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89/ts-2024-11-29-16-09-32 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3213.380360] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 5c4dd705-3ebd-49c2-9c06-9370b69c5d89 is no longer used by this node. Pending deletion! [ 3213.380491] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3213.380718] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1a4662f7-ab26-42f3-8aa8-253ec8b3ace0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3213.380807] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1a4662f7-ab26-42f3-8aa8-253ec8b3ace0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3213.381128] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1a4662f7-ab26-42f3-8aa8-253ec8b3ace0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3213.381390] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66b80cb1-2b94-448b-af79-f217c726bb1e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3213.385289] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3213.385289] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c24493-8bca-cc12-6f91-6c308832bb80" [ 3213.385289] env[61964]: _type = "Task" [ 3213.385289] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3213.392200] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c24493-8bca-cc12-6f91-6c308832bb80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3213.896271] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c24493-8bca-cc12-6f91-6c308832bb80, 'name': SearchDatastore_Task, 'duration_secs': 0.007995} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3213.896578] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/1a4662f7-ab26-42f3-8aa8-253ec8b3ace0 is no longer used. Deleting! [ 3213.896715] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1a4662f7-ab26-42f3-8aa8-253ec8b3ace0 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3213.896974] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4c1ce8f-7a90-4d25-86df-99f2d17e36b3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3213.903082] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3213.903082] env[61964]: value = "task-1688837" [ 3213.903082] env[61964]: _type = "Task" [ 3213.903082] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3213.910287] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3214.413375] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110559} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3214.413622] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3214.413822] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1a4662f7-ab26-42f3-8aa8-253ec8b3ace0" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3214.414087] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b07e66cf-7e13-46ad-9937-0a0269fdd554" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3214.414247] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/b07e66cf-7e13-46ad-9937-0a0269fdd554" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3214.414602] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b07e66cf-7e13-46ad-9937-0a0269fdd554" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3214.414886] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9f8ae90-f577-4cd5-ad93-24e60c5027b6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3214.419117] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3214.419117] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528c9db1-703f-09c4-3ee8-1343a66c62b7" [ 3214.419117] env[61964]: _type = "Task" [ 3214.419117] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3214.426306] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528c9db1-703f-09c4-3ee8-1343a66c62b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3214.929251] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528c9db1-703f-09c4-3ee8-1343a66c62b7, 'name': SearchDatastore_Task, 'duration_secs': 0.009875} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3214.929576] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/b07e66cf-7e13-46ad-9937-0a0269fdd554 is no longer used. Deleting! 
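Annotation: the records above and below trace one pass of the vmwareapi image-cache cleanup. For each entry under devstack-image-cache_base the worker takes a per-path lock, issues a SearchDatastore_Task, and removes the entry if nothing references it any more. The sketch below is a minimal, hypothetical rendering of that loop, not nova source; `datastore_search`, `is_image_in_use`, and `delete_datastore_file` are stand-ins for the oslo.vmware calls seen in the log.

```python
# Illustrative sketch only (not nova code): the per-image loop implied by the
# surrounding log records. The callables passed in stand for the
# SearchDatastore_Task / DeleteDatastoreFile_Task round-trips.
import threading

_cache_locks = {}            # one lock per image-cache path, as the per-path locks suggest
_registry_lock = threading.Lock()

def _lock_for(path):
    with _registry_lock:
        return _cache_locks.setdefault(path, threading.Lock())

def cleanup_unused_images(cache_paths, is_image_in_use, datastore_search,
                          delete_datastore_file):
    """Walk every cached image, re-check usage, and delete the unused ones."""
    for path in cache_paths:
        with _lock_for(path):                    # "Acquiring/Acquired lock ..." in the log
            datastore_search(path)               # SearchDatastore_Task round-trip
            if not is_image_in_use(path):
                print(f"Image {path} is no longer used. Deleting!")
                delete_datastore_file(path)      # DeleteDatastoreFile_Task round-trip
        # lock released here: "Releasing lock ..." in the log
```

A caller would supply the real datastore helpers; the per-path lock map mirrors the repeated `Acquiring lock "[datastore1] devstack-image-cache_base/<image-id>"` / `Releasing lock` pairs in the records above.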
[ 3214.929698] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b07e66cf-7e13-46ad-9937-0a0269fdd554 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3214.929958] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7179f2d4-6b25-441e-8c98-e59535fdcbcd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3214.937276] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3214.937276] env[61964]: value = "task-1688838" [ 3214.937276] env[61964]: _type = "Task" [ 3214.937276] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3214.945963] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3215.447374] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109281} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3215.447635] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3215.447768] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/b07e66cf-7e13-46ad-9937-0a0269fdd554" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3215.447990] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/88659aa9-4c13-4956-bae3-26788018e6cb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3215.448135] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/88659aa9-4c13-4956-bae3-26788018e6cb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3215.448458] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/88659aa9-4c13-4956-bae3-26788018e6cb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3215.448718] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf0cf072-49fd-4147-b9b6-9cae5cb21a77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3215.452815] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3215.452815] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52fc12a5-385b-3ded-c6a7-60ac52cd7258" [ 3215.452815] env[61964]: _type = "Task" [ 3215.452815] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3215.459676] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52fc12a5-385b-3ded-c6a7-60ac52cd7258, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3215.963870] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52fc12a5-385b-3ded-c6a7-60ac52cd7258, 'name': SearchDatastore_Task, 'duration_secs': 0.009064} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3215.964211] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/88659aa9-4c13-4956-bae3-26788018e6cb is no longer used. Deleting! [ 3215.964356] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/88659aa9-4c13-4956-bae3-26788018e6cb {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3215.964623] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e924a74-3ff5-4ffe-bfd9-fb78f8c18e6a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3215.971649] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3215.971649] env[61964]: value = "task-1688839" [ 3215.971649] env[61964]: _type = "Task" [ 3215.971649] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3215.978809] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688839, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3216.481475] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111907} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3216.481684] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3216.481857] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/88659aa9-4c13-4956-bae3-26788018e6cb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3216.482081] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3216.482201] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3216.482551] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3216.482818] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0a18906-8352-4b1e-804c-7e95a7b2825e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3216.486981] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3216.486981] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52416823-a0b4-9f9a-ebbc-f4b137b2c2ba" [ 3216.486981] env[61964]: _type = "Task" [ 3216.486981] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3216.493834] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52416823-a0b4-9f9a-ebbc-f4b137b2c2ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3216.997816] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52416823-a0b4-9f9a-ebbc-f4b137b2c2ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008671} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3216.998171] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3216.998411] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/16953e03-c700-482e-8f94-16e0db0a0a87" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3216.998544] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/16953e03-c700-482e-8f94-16e0db0a0a87" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3216.998856] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/16953e03-c700-482e-8f94-16e0db0a0a87" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3216.999145] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f10e8f7e-2c42-4ac1-a65e-df2cdbe56231 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3217.003513] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3217.003513] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525aefa7-2751-d8e9-83c7-505347c18f4f" [ 3217.003513] env[61964]: _type = "Task" [ 3217.003513] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3217.011283] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525aefa7-2751-d8e9-83c7-505347c18f4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3217.513634] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525aefa7-2751-d8e9-83c7-505347c18f4f, 'name': SearchDatastore_Task, 'duration_secs': 0.010106} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3217.513991] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/16953e03-c700-482e-8f94-16e0db0a0a87 is no longer used. Deleting! 
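Annotation: every SearchDatastore_Task and DeleteDatastoreFile_Task in this pass is driven by the same wait loop, which is why each task logs a `progress is 0%` poll followed by `completed successfully ... duration_secs`. A minimal sketch of that polling pattern, assuming a hypothetical `get_task_info()` callable in place of the real vCenter task query:

```python
# Illustrative sketch of the polling behind the repeated "progress is 0%" /
# "completed successfully ... duration_secs" lines. get_task_info() is a
# hypothetical stand-in for the task query performed per poll.
import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reports success, mirroring the log's wait loop."""
    start = time.monotonic()
    while True:
        info = get_task_info()                       # one poll == one progress line
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"completed successfully. duration_secs={duration:.6f}")
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)                    # back off before the next poll
```

The sub-second `duration_secs` values in the log mean most tasks here finish on the first or second poll.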
[ 3217.514124] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/16953e03-c700-482e-8f94-16e0db0a0a87 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3217.514375] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e97c10f4-bb1f-4dda-aad5-c92208bd5a43 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3217.520314] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3217.520314] env[61964]: value = "task-1688840" [ 3217.520314] env[61964]: _type = "Task" [ 3217.520314] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3217.528233] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688840, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3218.030971] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102036} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3218.031313] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3218.031408] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/16953e03-c700-482e-8f94-16e0db0a0a87" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3218.031634] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3218.031862] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3218.032077] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3218.032367] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e63eba31-67c0-4ae1-8d08-ca96dd828aaa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3218.038416] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3218.038416] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52bcfc4e-2783-d104-4db8-da3ef960e014" [ 3218.038416] env[61964]: _type = "Task" [ 3218.038416] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3218.045076] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52bcfc4e-2783-d104-4db8-da3ef960e014, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3218.547457] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52bcfc4e-2783-d104-4db8-da3ef960e014, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3218.547733] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf is no longer used. Deleting! [ 3218.547882] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3218.548160] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76cfd209-5414-4f08-b651-619c27e5aa9b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3218.554897] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3218.554897] env[61964]: value = "task-1688841" [ 3218.554897] env[61964]: _type = "Task" [ 3218.554897] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3218.562084] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3219.064792] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095235} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3219.065242] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3219.065384] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3219.065508] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/9ff02d4e-865c-4c47-bb6c-fc1e0ba36403" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3219.065616] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/9ff02d4e-865c-4c47-bb6c-fc1e0ba36403" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3219.065940] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/9ff02d4e-865c-4c47-bb6c-fc1e0ba36403" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3219.066224] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df3ef548-a7df-49d4-8b3b-08805009acb8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3219.070612] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3219.070612] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b3e40d-276a-6593-bbcf-95a769909b4a" [ 3219.070612] env[61964]: _type = "Task" [ 3219.070612] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3219.078521] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b3e40d-276a-6593-bbcf-95a769909b4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3219.581812] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b3e40d-276a-6593-bbcf-95a769909b4a, 'name': SearchDatastore_Task, 'duration_secs': 0.011396} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3219.582755] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/9ff02d4e-865c-4c47-bb6c-fc1e0ba36403 is no longer used. Deleting! 
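Annotation: each cache entry is guarded by both an in-process lock and an "external semaphore" before the datastore search runs (`Acquiring lock ... Acquired lock ... Acquired external semaphore ... Releasing lock`). The context manager below approximates that pairing with `threading.Lock` plus `fcntl.flock`; it illustrates the pattern only and is not the oslo_concurrency implementation (and it assumes a POSIX host).

```python
# Illustrative only: an in-process lock paired with a file-based
# (cross-process) lock per cache entry, approximating the log's
# lock + external-semaphore sequence.
import fcntl
import threading
from contextlib import contextmanager

_local_locks = {}
_guard = threading.Lock()

@contextmanager
def image_cache_lock(name, lock_dir="/tmp"):
    with _guard:
        local = _local_locks.setdefault(name, threading.Lock())
    with local:                                   # "Acquired lock ..."
        lock_path = f"{lock_dir}/{name.replace('/', '-')}.lock"
        with open(lock_path, "w") as fh:
            fcntl.flock(fh, fcntl.LOCK_EX)        # "Acquired external semaphore ..."
            try:
                yield
            finally:
                fcntl.flock(fh, fcntl.LOCK_UN)    # released before "Releasing lock ..."
```

Usage would look like `with image_cache_lock("[datastore1] devstack-image-cache_base/<image-id>"): ...`, serializing cleanup of one cache entry across workers.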
[ 3219.582755] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/9ff02d4e-865c-4c47-bb6c-fc1e0ba36403 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3219.582755] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82055d44-fe87-48ed-a981-ca04668d2c56 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3219.589338] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3219.589338] env[61964]: value = "task-1688842" [ 3219.589338] env[61964]: _type = "Task" [ 3219.589338] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3219.596479] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3220.099272] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103946} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3220.099765] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3220.099765] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/9ff02d4e-865c-4c47-bb6c-fc1e0ba36403" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3220.099915] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/89f6e10c-947c-4b17-9105-0ce87b9b97bb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3220.100019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/89f6e10c-947c-4b17-9105-0ce87b9b97bb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3220.100744] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/89f6e10c-947c-4b17-9105-0ce87b9b97bb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3220.101017] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4078bf7-d05e-4187-b45b-4c628a9f89b2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3220.105202] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3220.105202] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528fb37b-e791-62d2-6101-6fcad0ee2c92" [ 3220.105202] env[61964]: _type = "Task" [ 3220.105202] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3220.112198] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528fb37b-e791-62d2-6101-6fcad0ee2c92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3220.617322] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528fb37b-e791-62d2-6101-6fcad0ee2c92, 'name': SearchDatastore_Task, 'duration_secs': 0.008907} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3220.617490] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/89f6e10c-947c-4b17-9105-0ce87b9b97bb is no longer used. Deleting! [ 3220.617624] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/89f6e10c-947c-4b17-9105-0ce87b9b97bb {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3220.617865] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6240e8d7-9f04-4da6-afba-0c7c70ed7115 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3220.624520] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3220.624520] env[61964]: value = "task-1688843" [ 3220.624520] env[61964]: _type = "Task" [ 3220.624520] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3220.631353] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3221.134102] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11063} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3221.134468] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3221.134507] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/89f6e10c-947c-4b17-9105-0ce87b9b97bb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3221.134731] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3221.134850] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3221.135177] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3221.135438] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbe80742-12c7-4d3f-9b47-cd69e69ff0aa {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3221.139620] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3221.139620] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521226d4-4a2f-ff06-f871-6dc17b8b723f" [ 3221.139620] env[61964]: _type = "Task" [ 3221.139620] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3221.146853] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521226d4-4a2f-ff06-f871-6dc17b8b723f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3221.659879] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521226d4-4a2f-ff06-f871-6dc17b8b723f, 'name': SearchDatastore_Task, 'duration_secs': 0.011436} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3221.660232] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3221.660454] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f827ddce-64be-4942-99b9-646e8635a491" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3221.660579] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/f827ddce-64be-4942-99b9-646e8635a491" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3221.660897] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f827ddce-64be-4942-99b9-646e8635a491" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3221.661193] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-346ba2cb-117c-44f5-8ca5-53c8e9248171 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3221.665605] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3221.665605] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52087aab-c871-5ced-7c7f-3ab59601aaa1" [ 3221.665605] env[61964]: _type = "Task" [ 3221.665605] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3221.678088] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52087aab-c871-5ced-7c7f-3ab59601aaa1, 'name': SearchDatastore_Task, 'duration_secs': 0.008924} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3221.678371] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/f827ddce-64be-4942-99b9-646e8635a491 is no longer used. Deleting! 
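Annotation: two outcomes alternate in this pass. Some images are removed immediately (`is no longer used. Deleting!` followed by a DeleteDatastoreFile_Task), while others only get a `ts-YYYY-MM-DD-HH-MM-SS` marker directory created and are logged as `Pending deletion!`. The sketch below shows one plausible aging policy consistent with those messages; the marker prefix and timestamp format match the log, but the helper callables and the one-day expiry are assumptions, not the exact nova logic.

```python
# Illustrative sketch of a marker-based aging policy: stamp an unused image
# first ("Pending deletion!"), remove it once the stamp is old enough
# ("Deleting!"). list_marker_dirs/make_dir/delete_dir are hypothetical
# stand-ins for the ds_util mkdir / file_delete calls in the log.
from datetime import datetime, timedelta

MARKER_PREFIX = "ts-"
MAX_AGE = timedelta(days=1)          # assumed expiry window, not taken from the log

def age_cached_image(image_dir, list_marker_dirs, make_dir, delete_dir, now=None):
    now = now or datetime.utcnow()
    markers = [d for d in list_marker_dirs(image_dir) if d.startswith(MARKER_PREFIX)]
    if not markers:
        # first pass over an unused image: stamp it and wait
        stamp = now.strftime("%Y-%m-%d-%H-%M-%S")
        make_dir(f"{image_dir}/{MARKER_PREFIX}{stamp}")
        print(f"Image {image_dir} is no longer used by this node. Pending deletion!")
        return
    oldest = min(datetime.strptime(m[len(MARKER_PREFIX):], "%Y-%m-%d-%H-%M-%S")
                 for m in markers)
    if now - oldest >= MAX_AGE:
        print(f"Image {image_dir} is no longer used. Deleting!")
        delete_dir(image_dir)
```

This matches the shape of the log, where images such as 24b48019-... and 3955961f-... receive fresh ts- directories while others are deleted outright in the same sweep.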
[ 3221.678519] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f827ddce-64be-4942-99b9-646e8635a491 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3221.678767] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-077e5692-9bcf-4c1f-83fc-abf10363d76f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3221.685082] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3221.685082] env[61964]: value = "task-1688844" [ 3221.685082] env[61964]: _type = "Task" [ 3221.685082] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3221.692299] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3222.195018] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100542} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3222.195373] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3222.195455] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/f827ddce-64be-4942-99b9-646e8635a491" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3222.195653] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3222.195768] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3222.196101] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3222.196370] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-778fd93f-9a88-4693-afde-66f2cbbb4ba1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3222.200699] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3222.200699] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5240b5af-b578-2b8c-60b6-e3866146d440" [ 3222.200699] env[61964]: _type = "Task" [ 3222.200699] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3222.207967] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5240b5af-b578-2b8c-60b6-e3866146d440, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3222.713042] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5240b5af-b578-2b8c-60b6-e3866146d440, 'name': SearchDatastore_Task, 'duration_secs': 0.008902} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3222.713356] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf is no longer used. Deleting! [ 3222.713508] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3222.713767] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-708d40b8-f9dd-418a-9e10-0d538b25c0ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3222.719600] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3222.719600] env[61964]: value = "task-1688845" [ 3222.719600] env[61964]: _type = "Task" [ 3222.719600] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3222.728144] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3223.229554] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093668} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3223.229915] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3223.229955] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3223.230163] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3223.230282] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3223.230589] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3223.230851] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42bb822c-29f6-4d1d-8168-ee9b374056c8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3223.235293] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3223.235293] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a9e783-2131-478d-70f3-2a5c182aafed" [ 3223.235293] env[61964]: _type = "Task" [ 3223.235293] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3223.242412] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a9e783-2131-478d-70f3-2a5c182aafed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3223.746029] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a9e783-2131-478d-70f3-2a5c182aafed, 'name': SearchDatastore_Task, 'duration_secs': 0.00833} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3223.746029] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada/ts-2024-11-29-16-09-42 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3223.746207] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b84f8f67-b6ee-4292-87da-d5710ed8c92f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3223.758651] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada/ts-2024-11-29-16-09-42 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3223.758651] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 24b48019-91e2-405c-8754-f8489ab8bada is no longer used by this node. Pending deletion! [ 3223.758651] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3223.758651] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3223.758651] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3223.758899] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3223.759037] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efffe2b7-8d94-4c8a-b59c-d84c51de0165 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3223.763153] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3223.763153] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f83223-e638-c709-9d8e-4fb00ce2749c" [ 3223.763153] env[61964]: _type = "Task" [ 3223.763153] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3223.770511] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f83223-e638-c709-9d8e-4fb00ce2749c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3224.273962] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f83223-e638-c709-9d8e-4fb00ce2749c, 'name': SearchDatastore_Task, 'duration_secs': 0.008808} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3224.274333] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249/ts-2024-11-29-16-09-43 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3224.274549] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0539e90d-8ff0-4f28-9941-578d6f2bd4ed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3224.301891] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249/ts-2024-11-29-16-09-43 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3224.302085] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 3955961f-c4fc-4fe2-9399-49b36b0f8249 is no longer used by this node. Pending deletion! [ 3224.302231] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3224.302459] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ee508dac-7de1-40f8-b397-73f8ce9ca2af" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3224.302577] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/ee508dac-7de1-40f8-b397-73f8ce9ca2af" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3224.302954] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ee508dac-7de1-40f8-b397-73f8ce9ca2af" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3224.303265] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5fec872-b48f-47cd-a7a7-8521da927a7a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3224.314972] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3224.314972] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5227bf1a-3317-dab8-4fb6-c2efbecf0ba8" [ 3224.314972] env[61964]: _type = "Task" [ 3224.314972] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3224.325836] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5227bf1a-3317-dab8-4fb6-c2efbecf0ba8, 'name': SearchDatastore_Task, 'duration_secs': 0.012056} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3224.325836] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/ee508dac-7de1-40f8-b397-73f8ce9ca2af is no longer used. Deleting! [ 3224.325836] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/ee508dac-7de1-40f8-b397-73f8ce9ca2af {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3224.325836] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4d19b50-5039-4622-9b74-2b088127361c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3224.332354] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3224.332354] env[61964]: value = "task-1688846" [ 3224.332354] env[61964]: _type = "Task" [ 3224.332354] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3224.343929] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3224.841918] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109998} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3224.842156] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3224.842396] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/ee508dac-7de1-40f8-b397-73f8ce9ca2af" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3224.842657] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3224.846016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3224.846016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3224.846016] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-457b9a76-29c1-45f4-ae53-4f5533983536 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3224.847668] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3224.847668] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5213b764-632d-d0bd-5cd9-3942d66f8035" [ 3224.847668] env[61964]: _type = "Task" [ 3224.847668] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3224.856012] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5213b764-632d-d0bd-5cd9-3942d66f8035, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3225.358394] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5213b764-632d-d0bd-5cd9-3942d66f8035, 'name': SearchDatastore_Task, 'duration_secs': 0.008901} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3225.358886] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd/ts-2024-11-29-16-09-44 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3225.358975] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-364ebdb5-6a14-4f61-81df-86f7a764f065 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3225.370250] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd/ts-2024-11-29-16-09-44 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3225.370434] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image d31f4473-c51c-4050-a140-362e830b0dbd is no longer used by this node. Pending deletion! [ 3225.370552] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3225.370766] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a993637e-3303-4cdb-a53a-cc2dd5d09121" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3225.370885] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/a993637e-3303-4cdb-a53a-cc2dd5d09121" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3225.371211] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a993637e-3303-4cdb-a53a-cc2dd5d09121" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3225.371498] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5969c76-70b7-45ae-810a-e2909f23f518 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3225.375549] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3225.375549] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a2e354-b381-2ce3-f11b-2a6ee495787d" [ 3225.375549] env[61964]: _type = "Task" [ 3225.375549] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3225.382903] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a2e354-b381-2ce3-f11b-2a6ee495787d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3225.886640] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a2e354-b381-2ce3-f11b-2a6ee495787d, 'name': SearchDatastore_Task, 'duration_secs': 0.008286} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3225.886849] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/a993637e-3303-4cdb-a53a-cc2dd5d09121 is no longer used. Deleting! [ 3225.886996] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a993637e-3303-4cdb-a53a-cc2dd5d09121 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3225.887284] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-410287c3-3b8b-4091-b787-0172d1d5743e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3225.893536] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3225.893536] env[61964]: value = "task-1688847" [ 3225.893536] env[61964]: _type = "Task" [ 3225.893536] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3225.901663] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3226.403052] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102152} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3226.403397] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3226.403397] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/a993637e-3303-4cdb-a53a-cc2dd5d09121" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3226.403609] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3226.403729] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3226.404051] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3226.404314] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9253d68c-cc9f-4860-aad9-2b1b78c100b9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3226.408579] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3226.408579] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ccf4fb-f434-409b-36e5-352fc005892d" [ 3226.408579] env[61964]: _type = "Task" [ 3226.408579] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3226.416944] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ccf4fb-f434-409b-36e5-352fc005892d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3226.919119] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ccf4fb-f434-409b-36e5-352fc005892d, 'name': SearchDatastore_Task, 'duration_secs': 0.009137} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3226.919441] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b is no longer used. Deleting! 
[ 3226.919589] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3226.919848] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fd56d30-fa5a-40ab-a91d-2b8f5e27c3ab {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3226.925892] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3226.925892] env[61964]: value = "task-1688848" [ 3226.925892] env[61964]: _type = "Task" [ 3226.925892] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3226.933298] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688848, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3227.439027] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097493} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3227.439027] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3227.439027] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3227.439027] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a35554f1-efed-47cb-8fa7-390c6699d082" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3227.439027] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/a35554f1-efed-47cb-8fa7-390c6699d082" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3227.439027] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a35554f1-efed-47cb-8fa7-390c6699d082" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3227.439027] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d21d6cbc-c325-4cad-a4e4-8da5c5b41947 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3227.444019] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3227.444019] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52609845-d2cd-7ed1-eb45-3e0e31b2ccbd" [ 3227.444019] env[61964]: _type = "Task" [ 3227.444019] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3227.448791] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52609845-d2cd-7ed1-eb45-3e0e31b2ccbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3227.952061] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52609845-d2cd-7ed1-eb45-3e0e31b2ccbd, 'name': SearchDatastore_Task, 'duration_secs': 0.00876} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3227.952292] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/a35554f1-efed-47cb-8fa7-390c6699d082 is no longer used. Deleting! [ 3227.952292] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a35554f1-efed-47cb-8fa7-390c6699d082 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3227.952536] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7322e4b-7fc7-4fdf-b91e-5ee8ac665865 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3227.958365] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3227.958365] env[61964]: value = "task-1688849" [ 3227.958365] env[61964]: _type = "Task" [ 3227.958365] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3227.965687] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688849, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3228.468313] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096546} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3228.468648] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3228.468648] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/a35554f1-efed-47cb-8fa7-390c6699d082" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3228.468871] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d046ff73-a9ed-4d67-b9e2-fbfa9354faca" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3228.468990] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/d046ff73-a9ed-4d67-b9e2-fbfa9354faca" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3228.469407] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d046ff73-a9ed-4d67-b9e2-fbfa9354faca" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3228.469683] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81601aeb-2b80-45fb-8b96-e63f94b6486f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3228.474139] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3228.474139] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5270032f-937e-958e-7d73-6f7b87783326" [ 3228.474139] env[61964]: _type = "Task" [ 3228.474139] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3228.481184] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5270032f-937e-958e-7d73-6f7b87783326, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3228.984628] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5270032f-937e-958e-7d73-6f7b87783326, 'name': SearchDatastore_Task, 'duration_secs': 0.008682} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3228.984889] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/d046ff73-a9ed-4d67-b9e2-fbfa9354faca is no longer used. Deleting! 
[ 3228.985029] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d046ff73-a9ed-4d67-b9e2-fbfa9354faca {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3228.985292] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3708c849-8e5e-4610-898f-713a66de960b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3228.991299] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3228.991299] env[61964]: value = "task-1688850" [ 3228.991299] env[61964]: _type = "Task" [ 3228.991299] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3228.998769] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688850, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3229.501316] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107874} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3229.501672] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3229.501767] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/d046ff73-a9ed-4d67-b9e2-fbfa9354faca" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3229.501982] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/9459c716-c438-4b3a-8a29-4ccec581edf6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3229.502117] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/9459c716-c438-4b3a-8a29-4ccec581edf6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3229.502447] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/9459c716-c438-4b3a-8a29-4ccec581edf6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3229.502707] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-102383aa-e0ee-4e94-9d37-fb4e08e4baa6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3229.506980] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3229.506980] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f1f176-a1f3-efe8-5a41-7272e148867a" [ 3229.506980] env[61964]: _type = "Task" [ 3229.506980] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3229.514043] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f1f176-a1f3-efe8-5a41-7272e148867a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3230.018073] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f1f176-a1f3-efe8-5a41-7272e148867a, 'name': SearchDatastore_Task, 'duration_secs': 0.00875} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3230.018405] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/9459c716-c438-4b3a-8a29-4ccec581edf6 is no longer used. Deleting! [ 3230.018557] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/9459c716-c438-4b3a-8a29-4ccec581edf6 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3230.018828] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee187e60-cd8a-48bc-98a2-12a87bd81100 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3230.025875] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3230.025875] env[61964]: value = "task-1688851" [ 3230.025875] env[61964]: _type = "Task" [ 3230.025875] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3230.034754] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688851, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3230.535630] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121324} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3230.535907] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3230.536018] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/9459c716-c438-4b3a-8a29-4ccec581edf6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3230.536250] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/58135848-a925-492c-b0c1-e1dac9636764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3230.536361] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/58135848-a925-492c-b0c1-e1dac9636764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3230.536669] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/58135848-a925-492c-b0c1-e1dac9636764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3230.536936] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d3f3f72-2bca-4554-8b76-590cd5d75334 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3230.541258] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3230.541258] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9a63c-ab48-d4ea-6914-7a9fdd096e49" [ 3230.541258] env[61964]: _type = "Task" [ 3230.541258] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3230.549341] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9a63c-ab48-d4ea-6914-7a9fdd096e49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3231.051990] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9a63c-ab48-d4ea-6914-7a9fdd096e49, 'name': SearchDatastore_Task, 'duration_secs': 0.00871} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3231.052300] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/58135848-a925-492c-b0c1-e1dac9636764/ts-2024-11-29-16-09-49 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3231.052541] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80df1482-3ccd-4970-8194-71d62b60a723 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3231.064432] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/58135848-a925-492c-b0c1-e1dac9636764/ts-2024-11-29-16-09-49 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3231.064549] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 58135848-a925-492c-b0c1-e1dac9636764 is no longer used by this node. Pending deletion! [ 3231.064695] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/58135848-a925-492c-b0c1-e1dac9636764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3231.064899] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f9dcbbe2-656d-4a4d-b965-2017807dcfe8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3231.065039] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/f9dcbbe2-656d-4a4d-b965-2017807dcfe8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3231.065354] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f9dcbbe2-656d-4a4d-b965-2017807dcfe8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3231.065580] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d185ca59-3db7-416a-935d-c7b6a180fd30 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3231.069674] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3231.069674] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529ecef7-e755-aa32-7f7c-e0f2e3edbf0b" [ 3231.069674] env[61964]: _type = "Task" [ 3231.069674] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3231.076975] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529ecef7-e755-aa32-7f7c-e0f2e3edbf0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3231.580511] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529ecef7-e755-aa32-7f7c-e0f2e3edbf0b, 'name': SearchDatastore_Task, 'duration_secs': 0.008567} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3231.580820] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/f9dcbbe2-656d-4a4d-b965-2017807dcfe8 is no longer used. Deleting! [ 3231.580964] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f9dcbbe2-656d-4a4d-b965-2017807dcfe8 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3231.581232] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3ec2a39-4502-4dab-bb59-269c8ae1d50b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3231.587770] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3231.587770] env[61964]: value = "task-1688852" [ 3231.587770] env[61964]: _type = "Task" [ 3231.587770] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3231.594950] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688852, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3232.097466] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688852, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109009} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3232.097679] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3232.097857] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/f9dcbbe2-656d-4a4d-b965-2017807dcfe8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3232.098087] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/02519942-0d31-4802-94ee-ce47bece81b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3232.098208] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/02519942-0d31-4802-94ee-ce47bece81b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3232.098537] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/02519942-0d31-4802-94ee-ce47bece81b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3232.098802] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46ab38a9-5ac7-4390-97e2-740538c79ce2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3232.102952] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3232.102952] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f7c8ef-fc76-6259-6be7-3ad5bcb54dab" [ 3232.102952] env[61964]: _type = "Task" [ 3232.102952] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3232.109858] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f7c8ef-fc76-6259-6be7-3ad5bcb54dab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3232.613984] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f7c8ef-fc76-6259-6be7-3ad5bcb54dab, 'name': SearchDatastore_Task, 'duration_secs': 0.009462} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3232.614315] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/02519942-0d31-4802-94ee-ce47bece81b2 is no longer used. Deleting! 
[ 3232.614445] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/02519942-0d31-4802-94ee-ce47bece81b2 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3232.614705] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eba68b1b-dc77-4731-9f33-5af315b670f0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3232.620471] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3232.620471] env[61964]: value = "task-1688853" [ 3232.620471] env[61964]: _type = "Task" [ 3232.620471] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3232.627535] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688853, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3233.129763] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101317} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3233.129990] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3233.130162] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/02519942-0d31-4802-94ee-ce47bece81b2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3233.130378] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d32c5459-b6f0-4e3c-ada0-b1599c90f430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3233.130530] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/d32c5459-b6f0-4e3c-ada0-b1599c90f430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3233.130904] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d32c5459-b6f0-4e3c-ada0-b1599c90f430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3233.131181] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55a56033-15ff-450f-8747-698665d064c0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3233.135305] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3233.135305] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524992de-d7a9-1f2b-0f12-282cd427fcf5" [ 3233.135305] env[61964]: _type = "Task" [ 3233.135305] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3233.142654] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524992de-d7a9-1f2b-0f12-282cd427fcf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3233.646532] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524992de-d7a9-1f2b-0f12-282cd427fcf5, 'name': SearchDatastore_Task, 'duration_secs': 0.008849} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3233.646960] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/d32c5459-b6f0-4e3c-ada0-b1599c90f430 is no longer used. Deleting! [ 3233.647020] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d32c5459-b6f0-4e3c-ada0-b1599c90f430 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3233.647287] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6bbe06f-5690-45b5-a878-442d4a6cb9d1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3233.653368] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3233.653368] env[61964]: value = "task-1688854" [ 3233.653368] env[61964]: _type = "Task" [ 3233.653368] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3233.661364] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688854, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3233.958980] env[61964]: DEBUG oslo_concurrency.lockutils [None req-07e893cb-aa23-4254-842e-e5a2dbb754db tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquiring lock "23ae2618-093b-49a7-b3e4-3d8038e08cc1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3234.164384] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106292} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3234.164619] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3234.164788] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/d32c5459-b6f0-4e3c-ada0-b1599c90f430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3234.165012] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3234.165140] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3234.165443] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3234.165704] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be186193-b155-4703-bd7e-bba8008d8d1c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3234.169832] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3234.169832] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d42856-3102-af0f-347d-05dfcff08e87" [ 3234.169832] env[61964]: _type = "Task" [ 3234.169832] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3234.176953] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d42856-3102-af0f-347d-05dfcff08e87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3234.681224] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d42856-3102-af0f-347d-05dfcff08e87, 'name': SearchDatastore_Task, 'duration_secs': 0.008653} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3234.681704] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399/ts-2024-11-29-16-09-53 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3234.681771] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6cd0e8f-8e16-4ea9-9719-71551b13e583 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3234.806886] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399/ts-2024-11-29-16-09-53 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3234.807066] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 38bbea46-58a2-484f-98ff-92968c526399 is no longer used by this node. Pending deletion! [ 3234.807245] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3234.807460] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4707c2d7-7e3e-47da-b2a3-631f0660fb80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3234.807579] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/4707c2d7-7e3e-47da-b2a3-631f0660fb80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3234.807919] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4707c2d7-7e3e-47da-b2a3-631f0660fb80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3234.808221] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66d38d08-1f34-46e6-8268-47f235e27108 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3234.812930] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3234.812930] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522c4344-026f-8b35-c76e-a01d72a5697a" [ 3234.812930] env[61964]: _type = "Task" [ 3234.812930] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3234.820269] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522c4344-026f-8b35-c76e-a01d72a5697a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3235.323127] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522c4344-026f-8b35-c76e-a01d72a5697a, 'name': SearchDatastore_Task, 'duration_secs': 0.016711} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3235.323409] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/4707c2d7-7e3e-47da-b2a3-631f0660fb80 is no longer used. Deleting! [ 3235.323566] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4707c2d7-7e3e-47da-b2a3-631f0660fb80 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3235.323817] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b415112d-8b84-421e-a129-dd1ef52fea59 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3235.329691] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3235.329691] env[61964]: value = "task-1688855" [ 3235.329691] env[61964]: _type = "Task" [ 3235.329691] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3235.336889] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3235.839734] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155819} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3235.840089] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3235.840186] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/4707c2d7-7e3e-47da-b2a3-631f0660fb80" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3235.840378] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/dbc1bdb2-818a-4f0d-a4b1-3a1a0a7482b7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3235.840495] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/dbc1bdb2-818a-4f0d-a4b1-3a1a0a7482b7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3235.840826] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/dbc1bdb2-818a-4f0d-a4b1-3a1a0a7482b7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3235.841201] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8875b2db-8461-43f2-89c9-29fc4f7d82c4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3235.845608] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3235.845608] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52222862-f087-3e74-b7a4-ba9b079db683" [ 3235.845608] env[61964]: _type = "Task" [ 3235.845608] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3235.853049] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52222862-f087-3e74-b7a4-ba9b079db683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3236.356235] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52222862-f087-3e74-b7a4-ba9b079db683, 'name': SearchDatastore_Task, 'duration_secs': 0.009201} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3236.356499] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/dbc1bdb2-818a-4f0d-a4b1-3a1a0a7482b7 is no longer used. Deleting! 
[ 3236.356645] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/dbc1bdb2-818a-4f0d-a4b1-3a1a0a7482b7 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3236.356901] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eea1e0db-523d-4085-a3c1-e605c6dbc338 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3236.363212] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3236.363212] env[61964]: value = "task-1688856" [ 3236.363212] env[61964]: _type = "Task" [ 3236.363212] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3236.370480] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688856, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3236.872984] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688856, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103353} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3236.873372] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3236.873412] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/dbc1bdb2-818a-4f0d-a4b1-3a1a0a7482b7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3236.873619] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3236.873745] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3236.874064] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3236.874330] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be2cd468-8c0d-40e8-a9f0-17131742c7dd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3236.878525] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3236.878525] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527ce0f4-39ce-7a6f-1ded-a846f0423130" [ 3236.878525] env[61964]: _type = "Task" [ 3236.878525] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3236.885945] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527ce0f4-39ce-7a6f-1ded-a846f0423130, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3237.389583] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527ce0f4-39ce-7a6f-1ded-a846f0423130, 'name': SearchDatastore_Task, 'duration_secs': 0.009341} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3237.389832] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e/ts-2024-11-29-16-09-56 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3237.390108] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2fdab31-a644-4193-af98-3f69fecc5cb7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3237.402613] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e/ts-2024-11-29-16-09-56 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3237.402756] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 4f561b7f-8554-40fc-b0ce-106ef7df334e is no longer used by this node. Pending deletion! 
[ 3237.402918] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3237.403142] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3237.403260] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3237.403580] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3237.403814] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd5863e7-c9ad-4c83-b4a3-42ed89924799 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3237.407640] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3237.407640] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5235f472-78cb-dee3-1a27-bbae9b46fd84" [ 3237.407640] env[61964]: _type = "Task" [ 3237.407640] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3237.414878] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5235f472-78cb-dee3-1a27-bbae9b46fd84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3237.917878] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5235f472-78cb-dee3-1a27-bbae9b46fd84, 'name': SearchDatastore_Task, 'duration_secs': 0.008209} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3237.918239] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19/ts-2024-11-29-16-09-56 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3237.918453] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e79a9c88-6fe4-47f2-a301-123b1936c361 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3237.930902] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19/ts-2024-11-29-16-09-56 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3237.931051] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 8c397d6a-2255-40a1-a544-a5131382ed19 is no longer used by this node. Pending deletion! [ 3237.931216] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3237.931417] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/9fb3fb5b-a04a-46bc-b0f8-95473e273956" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3237.931553] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/9fb3fb5b-a04a-46bc-b0f8-95473e273956" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3237.931861] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/9fb3fb5b-a04a-46bc-b0f8-95473e273956" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3237.932102] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1329585-35ce-4d59-a8c0-09f71acb3efe {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3237.936764] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3237.936764] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5270997f-7fe4-e01d-663b-dc2a64d34e7c" [ 3237.936764] env[61964]: _type = "Task" [ 3237.936764] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3237.944164] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5270997f-7fe4-e01d-663b-dc2a64d34e7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3238.059900] env[61964]: WARNING oslo_vmware.rw_handles [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3238.059900] env[61964]: ERROR oslo_vmware.rw_handles [ 3238.060298] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3238.062361] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3238.062614] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Copying Virtual Disk [datastore1] vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/127a0994-aea4-4346-ab75-c2634dd2f9b5/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3238.063201] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff6c6e3e-a44e-42af-af32-aa38a04dcb2c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.071304] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Waiting for the task: (returnval){ [ 3238.071304] env[61964]: value = 
"task-1688857" [ 3238.071304] env[61964]: _type = "Task" [ 3238.071304] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3238.080025] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Task: {'id': task-1688857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3238.447549] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5270997f-7fe4-e01d-663b-dc2a64d34e7c, 'name': SearchDatastore_Task, 'duration_secs': 0.008516} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3238.447797] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/9fb3fb5b-a04a-46bc-b0f8-95473e273956/ts-2024-11-29-16-09-57 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3238.448078] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dc1f525-f550-4ea1-ac16-c621a9e33294 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.460407] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/9fb3fb5b-a04a-46bc-b0f8-95473e273956/ts-2024-11-29-16-09-57 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3238.460407] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 9fb3fb5b-a04a-46bc-b0f8-95473e273956 is no longer used by this node. Pending deletion! 
[ 3238.460407] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/9fb3fb5b-a04a-46bc-b0f8-95473e273956" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3238.460407] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6387c3e1-2124-44a1-843f-95f6f0b195c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3238.460407] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/6387c3e1-2124-44a1-843f-95f6f0b195c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3238.460407] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6387c3e1-2124-44a1-843f-95f6f0b195c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3238.460688] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-726a2d1d-6ce4-4cdb-8f58-d2155e996a5d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.464707] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3238.464707] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520db328-e5f1-1adf-e501-6ecf04d021f3" [ 3238.464707] env[61964]: _type = "Task" [ 3238.464707] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3238.472151] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520db328-e5f1-1adf-e501-6ecf04d021f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3238.581378] env[61964]: DEBUG oslo_vmware.exceptions [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Fault InvalidArgument not matched. 
{{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3238.581669] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3238.582213] env[61964]: ERROR nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3238.582213] env[61964]: Faults: ['InvalidArgument'] [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Traceback (most recent call last): [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] yield resources [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self.driver.spawn(context, instance, image_meta, [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self._fetch_image_if_missing(context, vi) [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] image_cache(vi, tmp_image_ds_loc) [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] vm_util.copy_virtual_disk( [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] session._wait_for_task(vmdk_copy_task) [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] return self.wait_for_task(task_ref) [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] return evt.wait() [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] result = hub.switch() [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] return self.greenlet.switch() [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self.f(*self.args, **self.kw) [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] raise exceptions.translate_fault(task_info.error) [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Faults: ['InvalidArgument'] [ 3238.582213] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] [ 3238.583732] env[61964]: INFO nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Terminating instance [ 3238.584069] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3238.584276] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3238.584506] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05e8c033-33ba-4f25-8760-2d35e477c5e5 {{(pid=61964) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.586705] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3238.586893] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3238.587585] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eebb371-41cd-4c31-b625-04e48a84f126 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.593949] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3238.594168] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdfce0b9-4e0a-4462-80b5-3d7c7c683d8f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.596103] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3238.596277] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3238.597173] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8d3ee32-b5f6-4489-96f2-d1b9caa0e0d8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.602292] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 3238.602292] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ce4c9b-0aeb-9c97-6bd3-7f76f908edfb" [ 3238.602292] env[61964]: _type = "Task" [ 3238.602292] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3238.608993] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ce4c9b-0aeb-9c97-6bd3-7f76f908edfb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3238.669720] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3238.669962] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3238.670130] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Deleting the datastore file [datastore1] 3352530a-f799-4a76-9e18-86ab1bd96d2e {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3238.670395] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9c8ea3a-3439-4e32-a136-99d3de14bd77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.677795] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Waiting for the task: (returnval){ [ 3238.677795] env[61964]: value = "task-1688859" [ 3238.677795] env[61964]: _type = "Task" [ 3238.677795] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3238.685305] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Task: {'id': task-1688859, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3238.975476] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]520db328-e5f1-1adf-e501-6ecf04d021f3, 'name': SearchDatastore_Task, 'duration_secs': 0.008603} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3238.975863] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/6387c3e1-2124-44a1-843f-95f6f0b195c9 is no longer used. Deleting! 
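Every cache entry in this pass is examined under its own named lock (the Acquiring lock / Acquired lock / Releasing lock "[datastore1] devstack-image-cache_base/..." lines), so the periodic cleanup cannot race another worker that is still checking or copying the same file. A minimal sketch of that per-entry serialization using oslo.concurrency; search_datastore, is_still_used and delete_file are stand-ins for the SearchDatastore_Task / DeleteDatastoreFile_Task invocations, not real helpers.

    from oslo_concurrency import lockutils

    def reap_if_unused(path, is_still_used, search_datastore, delete_file):
        # One named lock per cache entry, e.g. "[datastore1] devstack-image-cache_base/<image-id>"
        with lockutils.lock(path):
            search_datastore(path)      # confirm what actually exists on the datastore
            if is_still_used(path):
                return False            # another instance still references this image
            delete_file(path)           # "... is no longer used. Deleting!"
            return True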
[ 3238.975994] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6387c3e1-2124-44a1-843f-95f6f0b195c9 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3238.976193] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a69769be-d82f-4001-9944-090dbd099771 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3238.982011] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3238.982011] env[61964]: value = "task-1688860" [ 3238.982011] env[61964]: _type = "Task" [ 3238.982011] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3238.989138] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688860, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3239.112988] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3239.112988] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating directory with path [datastore1] vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3239.112988] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f227112-1308-40b1-88f3-fce536811b06 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.123942] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Created directory with path [datastore1] vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3239.124162] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Fetch image to [datastore1] vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3239.124338] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] 
vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3239.125054] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76910a32-ecad-482b-acb9-17c066a121e7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.131670] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aca8e54-58a2-46e0-900a-fdc0e61220fc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.140361] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4cdada-67f2-4bf2-9049-4eca3e00082c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.186774] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04eed6c5-6c65-4d23-924f-9025942c1967 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.194348] env[61964]: DEBUG oslo_vmware.api [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Task: {'id': task-1688859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064236} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3239.196035] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3239.196236] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3239.196408] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3239.196578] env[61964]: INFO nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Took 0.61 seconds to destroy the instance on the hypervisor. 
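With the copy task failed, the records above walk the error path: the spawn exception propagates, the instance is terminated (UnregisterVM, then the contents of [datastore1] 3352530a-... are deleted), and the records that follow abort the compute_resources claim and re-schedule the build. A condensed, illustrative version of that control flow, with each callable standing in for the corresponding manager/vmops step:

    def build_and_run(instance, spawn, destroy_on_hypervisor, abort_claim, reschedule):
        """Sketch of the failure handling visible in the log; not the manager code."""
        try:
            spawn(instance)
        except Exception as exc:                 # here: VimFaultException, Faults: ['InvalidArgument']
            destroy_on_hypervisor(instance)      # UnregisterVM + delete the instance's datastore files
            abort_claim(instance)                # release the VCPU/MEMORY_MB/DISK_GB held for this build
            reschedule(instance, reason=str(exc))   # "Build of instance ... was re-scheduled"
            return False
        return True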
[ 3239.198691] env[61964]: DEBUG nova.compute.claims [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3239.198859] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3239.199089] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3239.201770] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6c54de04-169f-4828-b748-eafc5db44e87 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.225439] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3239.302229] env[61964]: DEBUG oslo_vmware.rw_handles [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3239.363206] env[61964]: DEBUG nova.scheduler.client.report [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Refreshing inventories for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 3239.367363] env[61964]: DEBUG oslo_vmware.rw_handles [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Completed reading data from the image iterator. 
{{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3239.367651] env[61964]: DEBUG oslo_vmware.rw_handles [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3239.380021] env[61964]: DEBUG nova.scheduler.client.report [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Updating ProviderTree inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 3239.380286] env[61964]: DEBUG nova.compute.provider_tree [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3239.391728] env[61964]: DEBUG nova.scheduler.client.report [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Refreshing aggregate associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, aggregates: None {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 3239.409239] env[61964]: DEBUG nova.scheduler.client.report [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Refreshing trait associations for resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=61964) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 3239.494702] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688860, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101563} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3239.495038] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3239.495279] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/6387c3e1-2124-44a1-843f-95f6f0b195c9" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3239.495663] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5fbbc812-0d18-4cac-9244-670b711fc282" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3239.495823] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/5fbbc812-0d18-4cac-9244-670b711fc282" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3239.496823] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5fbbc812-0d18-4cac-9244-670b711fc282" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3239.496823] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2977453-7848-4931-912a-f3db4f00f2be {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.501812] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3239.501812] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528e4a56-c9d0-5ed2-b689-bbf3b621f981" [ 3239.501812] env[61964]: _type = "Task" [ 3239.501812] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3239.506360] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39844b81-33af-4464-bb93-0a447195bbd4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.517176] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]528e4a56-c9d0-5ed2-b689-bbf3b621f981, 'name': SearchDatastore_Task, 'duration_secs': 0.009938} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3239.517820] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/5fbbc812-0d18-4cac-9244-670b711fc282 is no longer used. Deleting! 
[ 3239.517820] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/5fbbc812-0d18-4cac-9244-670b711fc282 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3239.518594] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e0b6dd-626b-4c55-8a1b-ee2597ad4e8f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.527322] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e27e4667-645c-49bc-afb5-25b6b59839f9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.579175] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5933c66-bedd-4f3f-b4d0-49b2b3cd97b3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.583141] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3239.583141] env[61964]: value = "task-1688861" [ 3239.583141] env[61964]: _type = "Task" [ 3239.583141] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3239.590533] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ea8488-dc28-4003-b56a-3a19eb445386 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3239.600545] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688861, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3239.612927] env[61964]: DEBUG nova.compute.provider_tree [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3239.651907] env[61964]: DEBUG nova.scheduler.client.report [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Updated inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 3239.652203] env[61964]: DEBUG nova.compute.provider_tree [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Updating resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 generation from 103 to 104 during operation: update_inventory {{(pid=61964) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 3239.652390] env[61964]: DEBUG nova.compute.provider_tree [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3239.668139] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.469s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3239.668699] env[61964]: ERROR nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3239.668699] env[61964]: 
Faults: ['InvalidArgument'] [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Traceback (most recent call last): [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self.driver.spawn(context, instance, image_meta, [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self._fetch_image_if_missing(context, vi) [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] image_cache(vi, tmp_image_ds_loc) [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] vm_util.copy_virtual_disk( [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] session._wait_for_task(vmdk_copy_task) [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] return self.wait_for_task(task_ref) [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] return evt.wait() [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] result = hub.switch() [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] return self.greenlet.switch() [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 
3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] self.f(*self.args, **self.kw) [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] raise exceptions.translate_fault(task_info.error) [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Faults: ['InvalidArgument'] [ 3239.668699] env[61964]: ERROR nova.compute.manager [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] [ 3239.669980] env[61964]: DEBUG nova.compute.utils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3239.670920] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Build of instance 3352530a-f799-4a76-9e18-86ab1bd96d2e was re-scheduled: A specified parameter was not correct: fileType [ 3239.670920] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 3239.671304] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 3239.671494] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 3239.671678] env[61964]: DEBUG nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3239.671849] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3239.985938] env[61964]: DEBUG nova.network.neutron [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3240.002057] env[61964]: INFO nova.compute.manager [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Took 0.33 seconds to deallocate network for instance. [ 3240.093833] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106785} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3240.094078] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3240.094255] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/5fbbc812-0d18-4cac-9244-670b711fc282" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3240.094480] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/eea13f5c-e753-45a0-a935-c153e57db793" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3240.094600] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/eea13f5c-e753-45a0-a935-c153e57db793" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3240.094920] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/eea13f5c-e753-45a0-a935-c153e57db793" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3240.095209] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-671f3117-d381-46e9-8262-7c43f3482090 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
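The traceback above ends in oslo_vmware.exceptions.VimFaultException: the CopyVirtualDisk_Task fails on the vCenter side with "A specified parameter was not correct: fileType" (fault 'InvalidArgument'), and the task-polling loop turns that task error into a Python exception, which unwinds through session._wait_for_task back into the spawn path and triggers the reschedule logged at 3239.670920. A minimal sketch of that poll-then-translate pattern; get_task_info and TaskFailed are illustrative stand-ins, not the actual oslo.vmware internals:

```python
import time


class TaskFailed(Exception):
    """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it finishes.

    get_task_info is an assumed callable returning an object with
    state / error / result attributes, similar to the TaskInfo that
    oslo.vmware fetches on each _poll_task iteration.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # This is the step visible in the traceback: the vCenter-side
            # error ("A specified parameter was not correct: fileType",
            # faults ['InvalidArgument']) becomes a Python exception that
            # propagates back to the caller waiting on the task.
            raise TaskFailed(getattr(info.error, 'localizedMessage',
                                     str(info.error)))
        time.sleep(poll_interval)
```

In the traceback the equivalent loop runs inside a looping call while the caller blocks on an eventlet Event, which is why the raise surfaces through hub.switch() before reaching copy_virtual_disk and spawn.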
[ 3240.102427] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3240.102427] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523f41fc-594d-f586-f941-ecb19bcd4e4a" [ 3240.102427] env[61964]: _type = "Task" [ 3240.102427] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3240.110667] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523f41fc-594d-f586-f941-ecb19bcd4e4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3240.128949] env[61964]: INFO nova.scheduler.client.report [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Deleted allocations for instance 3352530a-f799-4a76-9e18-86ab1bd96d2e [ 3240.154798] env[61964]: DEBUG oslo_concurrency.lockutils [None req-cec2d71d-c115-47b8-80e5-8186976a9628 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 470.709s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3240.155073] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 274.511s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3240.155306] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Acquiring lock "3352530a-f799-4a76-9e18-86ab1bd96d2e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3240.155561] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3240.155664] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3240.157706] env[61964]: INFO nova.compute.manager [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 
3352530a-f799-4a76-9e18-86ab1bd96d2e] Terminating instance [ 3240.159491] env[61964]: DEBUG nova.compute.manager [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3240.159682] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3240.160148] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3157826-eac3-48d7-b2ae-d980e4df1f6d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3240.170021] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba2069b-4737-44f0-a813-d6a1c2a89401 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3240.195445] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3352530a-f799-4a76-9e18-86ab1bd96d2e could not be found. [ 3240.195647] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3240.195819] env[61964]: INFO nova.compute.manager [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3240.196072] env[61964]: DEBUG oslo.service.loopingcall [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3240.196514] env[61964]: DEBUG nova.compute.manager [-] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3240.196619] env[61964]: DEBUG nova.network.neutron [-] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3240.219062] env[61964]: DEBUG nova.network.neutron [-] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3240.227389] env[61964]: INFO nova.compute.manager [-] [instance: 3352530a-f799-4a76-9e18-86ab1bd96d2e] Took 0.03 seconds to deallocate network for instance. [ 3240.314439] env[61964]: DEBUG oslo_concurrency.lockutils [None req-b3de98a3-0a78-4743-8f31-e79c8aeda6d1 tempest-ServersTestJSON-1034672809 tempest-ServersTestJSON-1034672809-project-member] Lock "3352530a-f799-4a76-9e18-86ab1bd96d2e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3240.613178] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523f41fc-594d-f586-f941-ecb19bcd4e4a, 'name': SearchDatastore_Task, 'duration_secs': 0.00944} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3240.613496] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/eea13f5c-e753-45a0-a935-c153e57db793 is no longer used. Deleting! [ 3240.613639] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/eea13f5c-e753-45a0-a935-c153e57db793 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3240.613923] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb3ce7ba-ce91-48eb-bff1-79dfbbd1e973 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3240.620378] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3240.620378] env[61964]: value = "task-1688862" [ 3240.620378] env[61964]: _type = "Task" [ 3240.620378] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3240.628901] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688862, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3241.129671] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688862, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107152} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3241.130726] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3241.130726] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/eea13f5c-e753-45a0-a935-c153e57db793" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3241.130855] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5664763a-1179-4b25-be50-515127b6f817" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3241.130855] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/5664763a-1179-4b25-be50-515127b6f817" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3241.131218] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5664763a-1179-4b25-be50-515127b6f817" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3241.131525] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4bfad4c-9c4c-4a17-a967-9c890b838420 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3241.135956] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3241.135956] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5227da03-fdf0-8824-22be-5fa2bc6ec0ca" [ 3241.135956] env[61964]: _type = "Task" [ 3241.135956] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3241.143690] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5227da03-fdf0-8824-22be-5fa2bc6ec0ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3241.647550] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5227da03-fdf0-8824-22be-5fa2bc6ec0ca, 'name': SearchDatastore_Task, 'duration_secs': 0.009141} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3241.647939] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/5664763a-1179-4b25-be50-515127b6f817 is no longer used. Deleting! 
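A few records back (3240.19), the destroy path hits nova.exception.InstanceNotFound, logs it only as a WARNING ('Instance does not exist on backend'), then still reports 'Instance destroyed' and proceeds to deallocate the network. A minimal sketch of that tolerant-teardown shape; backend_destroy and deallocate_network are hypothetical callables standing in for the hypervisor destroy and the Neutron cleanup seen above:

```python
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def terminate_instance(instance_uuid, backend_destroy, deallocate_network):
    """Destroy an instance, treating 'already gone' as success."""
    try:
        backend_destroy(instance_uuid)
    except InstanceNotFound:
        # Mirrors the WARNING above: the VM is no longer on the backend,
        # which is acceptable for a terminate -- nothing left to delete.
        LOG.warning("Instance %s does not exist on backend", instance_uuid)
    LOG.debug("Instance destroyed")
    # Network cleanup still runs so that Neutron ports are not leaked.
    deallocate_network(instance_uuid)
```

Treating the not-found case as success keeps terminate idempotent, which matters here because the build had already failed and been rescheduled, so no VM was ever created on this host for the instance.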
[ 3241.648156] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/5664763a-1179-4b25-be50-515127b6f817 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3241.648464] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1114cbb-3b14-4fdc-a25b-7c68780fc381 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3241.654708] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3241.654708] env[61964]: value = "task-1688863" [ 3241.654708] env[61964]: _type = "Task" [ 3241.654708] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3241.663014] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3242.166492] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115489} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3242.166492] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3242.166492] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/5664763a-1179-4b25-be50-515127b6f817" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3242.166492] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/815d7be5-ef60-481e-9952-be52bed24322" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3242.166492] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/815d7be5-ef60-481e-9952-be52bed24322" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3242.167052] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/815d7be5-ef60-481e-9952-be52bed24322" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3242.167052] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f68fd7fa-422f-4a08-ad1a-c400d2905b3d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3242.171876] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3242.171876] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f79c47-c7c7-b3fc-77e4-fa8fc01e877b" [ 3242.171876] env[61964]: _type = "Task" [ 3242.171876] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3242.179501] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f79c47-c7c7-b3fc-77e4-fa8fc01e877b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3242.688538] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f79c47-c7c7-b3fc-77e4-fa8fc01e877b, 'name': SearchDatastore_Task, 'duration_secs': 0.011037} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3242.688974] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/815d7be5-ef60-481e-9952-be52bed24322 is no longer used. Deleting! [ 3242.689157] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/815d7be5-ef60-481e-9952-be52bed24322 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3242.689763] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fce8d969-1c87-43d4-ab44-d6d32941cc78 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3242.696632] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3242.696632] env[61964]: value = "task-1688864" [ 3242.696632] env[61964]: _type = "Task" [ 3242.696632] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3242.704349] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3243.206809] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114112} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3243.207188] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3243.207275] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/815d7be5-ef60-481e-9952-be52bed24322" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3243.207433] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3243.207550] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3243.207880] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3243.208170] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a74a6c71-afad-43d8-b482-0f12f3639145 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3243.212356] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3243.212356] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5282252b-75e6-b5ec-3ce6-1eea56aedfd1" [ 3243.212356] env[61964]: _type = "Task" [ 3243.212356] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3243.219330] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5282252b-75e6-b5ec-3ce6-1eea56aedfd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3243.725960] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5282252b-75e6-b5ec-3ce6-1eea56aedfd1, 'name': SearchDatastore_Task, 'duration_secs': 0.008227} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3243.726260] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac/ts-2024-11-29-16-10-02 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3243.726526] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8988dad7-f009-4d8e-86f7-761fd9102db6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3243.739264] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac/ts-2024-11-29-16-10-02 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3243.739428] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 9247edf4-a122-44d1-be5f-7d303a52a4ac is no longer used by this node. Pending deletion! [ 3243.739612] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3243.739862] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/86f22191-fc0e-40f3-9e49-017e9cb62044" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3243.739980] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/86f22191-fc0e-40f3-9e49-017e9cb62044" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3243.740376] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/86f22191-fc0e-40f3-9e49-017e9cb62044" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3243.740640] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6588db9d-bc17-4f01-9785-ac8c7c80606a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3243.752164] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3243.752164] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bce6a-40b5-7ad9-549a-98257fddabf8" [ 3243.752164] env[61964]: _type = "Task" [ 3243.752164] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3243.767643] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bce6a-40b5-7ad9-549a-98257fddabf8, 'name': SearchDatastore_Task, 'duration_secs': 0.008857} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3243.768376] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/86f22191-fc0e-40f3-9e49-017e9cb62044 is no longer used. Deleting! [ 3243.770540] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/86f22191-fc0e-40f3-9e49-017e9cb62044 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3243.770540] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcd64cd1-e3a4-4ad5-a336-12b8c509fd29 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3243.777023] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3243.777023] env[61964]: value = "task-1688865" [ 3243.777023] env[61964]: _type = "Task" [ 3243.777023] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3243.782608] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688865, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3244.283871] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688865, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112568} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3244.284622] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3244.287690] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/86f22191-fc0e-40f3-9e49-017e9cb62044" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3244.287690] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f99e7180-1aa6-4445-a796-8efce6a73c55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3244.287690] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/f99e7180-1aa6-4445-a796-8efce6a73c55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3244.287690] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f99e7180-1aa6-4445-a796-8efce6a73c55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3244.287690] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ca64a2b-58cf-4a18-b7ef-558baf25a3ad {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3244.293079] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3244.293079] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521d976a-8359-66ad-c863-5ce9714330ec" [ 3244.293079] env[61964]: _type = "Task" [ 3244.293079] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3244.298256] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521d976a-8359-66ad-c863-5ce9714330ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3244.803028] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521d976a-8359-66ad-c863-5ce9714330ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009005} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3244.803028] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/f99e7180-1aa6-4445-a796-8efce6a73c55 is no longer used. Deleting! 
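From 3240.09 onward the records repeat one cycle per image-cache entry: take the '[datastore1] devstack-image-cache_base/<image-id>' lock, run HostDatastoreBrowser.SearchDatastore_Task against the entry, and if it 'is no longer used', start a FileManager.DeleteDatastoreFile_Task and wait for it (task-1688861 through task-1688868 in this segment). A condensed sketch of that per-entry loop, not the exact nova.virt.vmwareapi.imagecache logic; entry_lock, entry_still_used, delete_datastore_file and wait_for_task are assumed interfaces:

```python
import logging

LOG = logging.getLogger(__name__)


def purge_unused_cache_entries(cached_image_ids, entry_lock, entry_still_used,
                               delete_datastore_file, wait_for_task):
    """One cleanup pass over the datastore image cache.

    entry_lock(name) is an assumed context manager, entry_still_used(id)
    an assumed usage check, and delete_datastore_file(id) starts a
    DeleteDatastoreFile_Task and returns its task reference.
    """
    for image_id in cached_image_ids:
        name = "[datastore1] devstack-image-cache_base/%s" % image_id
        with entry_lock(name):
            if entry_still_used(image_id):
                continue  # a running instance still references this image
            LOG.info("Image %s is no longer used. Deleting!", name)
            task = delete_datastore_file(image_id)
            wait_for_task(task)
```

The check and the delete happen under the same per-entry lock so a concurrent spawn cannot start re-using a cache folder that is about to be removed.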
[ 3244.803028] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f99e7180-1aa6-4445-a796-8efce6a73c55 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3244.803028] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3ce2cc2-3ce2-443b-a858-fa926d2dcef6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3244.812022] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3244.812022] env[61964]: value = "task-1688866" [ 3244.812022] env[61964]: _type = "Task" [ 3244.812022] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3244.817770] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3245.319817] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107922} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3245.320477] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3245.320801] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/f99e7180-1aa6-4445-a796-8efce6a73c55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3245.321184] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3245.321416] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3245.321834] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3245.322214] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d56fdbfa-d3dc-427e-a110-a58fa47333da {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3245.326512] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3245.326512] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5201ce38-8c02-a59f-2fed-b8cbb4e6a651" [ 3245.326512] env[61964]: _type = "Task" [ 3245.326512] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3245.334276] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5201ce38-8c02-a59f-2fed-b8cbb4e6a651, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3245.837589] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5201ce38-8c02-a59f-2fed-b8cbb4e6a651, 'name': SearchDatastore_Task, 'duration_secs': 0.008356} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3245.838065] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7/ts-2024-11-29-16-10-04 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3245.838477] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f4ff56a-2f85-4c1d-81d0-121c23bd48d4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3245.851062] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7/ts-2024-11-29-16-10-04 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3245.854016] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 7075f5cb-1c25-417a-8edf-759c68066be7 is no longer used by this node. Pending deletion! 
[ 3245.854016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3245.854016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3245.854016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3245.854016] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3245.854016] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c4a0504-ab40-4ff8-8bc9-e118a81acc8d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3245.857021] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3245.857021] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521203fe-673e-2938-92fe-0e1deb151376" [ 3245.857021] env[61964]: _type = "Task" [ 3245.857021] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3245.864772] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521203fe-673e-2938-92fe-0e1deb151376, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3246.369037] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521203fe-673e-2938-92fe-0e1deb151376, 'name': SearchDatastore_Task, 'duration_secs': 0.00854} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3246.369369] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36/ts-2024-11-29-16-10-05 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3246.369677] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51abf666-1759-47df-baf5-7524f67b8bfc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3246.381677] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36/ts-2024-11-29-16-10-05 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3246.381862] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 0e624580-63c4-4d24-b13b-c9defad3fc36 is no longer used by this node. Pending deletion! [ 3246.381991] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3246.382223] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/34f61656-335c-4441-a60a-30c605e04fd2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3246.382343] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/34f61656-335c-4441-a60a-30c605e04fd2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3246.382726] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/34f61656-335c-4441-a60a-30c605e04fd2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3246.382974] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0de8c626-70fd-4b64-92e2-98a28541a868 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3246.387020] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3246.387020] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a99d7a-2279-597e-831c-16916fde80f2" [ 3246.387020] env[61964]: _type = "Task" [ 3246.387020] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3246.394479] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a99d7a-2279-597e-831c-16916fde80f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3246.898286] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a99d7a-2279-597e-831c-16916fde80f2, 'name': SearchDatastore_Task, 'duration_secs': 0.008781} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3246.898546] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/34f61656-335c-4441-a60a-30c605e04fd2 is no longer used. Deleting! [ 3246.898686] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/34f61656-335c-4441-a60a-30c605e04fd2 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3246.898955] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a7fbd9f-589f-4539-ba95-11c7c2e7c9fb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3246.905174] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3246.905174] env[61964]: value = "task-1688867" [ 3246.905174] env[61964]: _type = "Task" [ 3246.905174] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3246.913536] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688867, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3247.416427] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281815} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3247.416800] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3247.416800] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/34f61656-335c-4441-a60a-30c605e04fd2" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3247.417019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3abb9724-13b8-4ce0-b9c0-fdbb7b53def3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3247.417157] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/3abb9724-13b8-4ce0-b9c0-fdbb7b53def3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3247.417497] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3abb9724-13b8-4ce0-b9c0-fdbb7b53def3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3247.417790] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-841450c1-2c97-4c5a-9823-30e094c58fa8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3247.423418] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3247.423418] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522a58d4-2e1a-134b-54d6-48538c9d894f" [ 3247.423418] env[61964]: _type = "Task" [ 3247.423418] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3247.432128] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522a58d4-2e1a-134b-54d6-48538c9d894f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3247.934564] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522a58d4-2e1a-134b-54d6-48538c9d894f, 'name': SearchDatastore_Task, 'duration_secs': 0.037758} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3247.934884] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/3abb9724-13b8-4ce0-b9c0-fdbb7b53def3 is no longer used. Deleting! 
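Each cache entry above is bracketed by the same lock choreography from oslo_concurrency.lockutils: 'Acquiring lock', 'Acquired lock', 'Acquired external semaphore', the work, then 'Releasing lock', all on the name '[datastore1] devstack-image-cache_base/<image-id>'. A hedged sketch of producing that pattern with lockutils.lock used as a context manager; external=True and the lock-name format come from the records, while lock_path and the do_cleanup callable are made up for the example:

```python
# Minimal sketch, assuming the acquire/release records above come from
# oslo_concurrency.lockutils.lock() used with an external (file-based) lock.
from oslo_concurrency import lockutils


def with_cache_entry_lock(image_id, do_cleanup, lock_path='/tmp'):
    # do_cleanup and lock_path are hypothetical; the lock name format is
    # the one that appears in the log.
    name = "[datastore1] devstack-image-cache_base/%s" % image_id
    # external=True adds a file lock under lock_path, so separate processes
    # (or hosts sharing the same state directory) also serialize on the
    # same cache entry, matching the "external semaphore" lines above.
    with lockutils.lock(name, external=True, lock_path=lock_path):
        do_cleanup(image_id)
```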
[ 3247.935071] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3abb9724-13b8-4ce0-b9c0-fdbb7b53def3 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3247.935326] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d33f58a2-534f-4dfa-b1f1-0a1f86ce4a21 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3247.941387] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3247.941387] env[61964]: value = "task-1688868" [ 3247.941387] env[61964]: _type = "Task" [ 3247.941387] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3247.949159] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688868, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3248.452325] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688868, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130651} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3248.452899] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3248.452899] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/3abb9724-13b8-4ce0-b9c0-fdbb7b53def3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3248.452899] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3248.453034] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3248.453367] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3248.453802] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5be385f-0710-4da4-8158-26bdcd485baf {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3248.458395] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3248.458395] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5298100e-1564-e90c-684f-18cdd9f5a97b" [ 3248.458395] env[61964]: _type = "Task" [ 3248.458395] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3248.466341] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5298100e-1564-e90c-684f-18cdd9f5a97b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3248.968880] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5298100e-1564-e90c-684f-18cdd9f5a97b, 'name': SearchDatastore_Task, 'duration_secs': 0.009379} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3248.969158] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207/ts-2024-11-29-16-10-07 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3248.969427] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46bbe85a-9fc0-49b5-88f7-f378e4fa6abb {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3248.980992] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207/ts-2024-11-29-16-10-07 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3248.981144] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 5e252684-9940-4dfc-a50d-13a9c36d8207 is no longer used by this node. Pending deletion! 
[ 3248.981309] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3248.981510] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2e9b9bfe-8d68-4679-96a6-a0669e5ced64" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3248.981661] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/2e9b9bfe-8d68-4679-96a6-a0669e5ced64" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3248.981991] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2e9b9bfe-8d68-4679-96a6-a0669e5ced64" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3248.982232] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25e0a0c0-3985-4700-a37f-78c8738e5b15 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3248.986244] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3248.986244] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52bbbe3c-b941-8672-f77f-4fe274a40d68" [ 3248.986244] env[61964]: _type = "Task" [ 3248.986244] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3248.993640] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52bbbe3c-b941-8672-f77f-4fe274a40d68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3249.496661] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52bbbe3c-b941-8672-f77f-4fe274a40d68, 'name': SearchDatastore_Task, 'duration_secs': 0.007965} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3249.497018] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/2e9b9bfe-8d68-4679-96a6-a0669e5ced64/ts-2024-11-29-16-10-08 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3249.497140] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16628342-11e4-4b96-8029-aff562899893 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3249.509264] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/2e9b9bfe-8d68-4679-96a6-a0669e5ced64/ts-2024-11-29-16-10-08 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3249.509402] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 2e9b9bfe-8d68-4679-96a6-a0669e5ced64 is no longer used by this node. Pending deletion! [ 3249.509562] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/2e9b9bfe-8d68-4679-96a6-a0669e5ced64" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3249.509792] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3249.509919] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3249.510240] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3249.510469] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1a682f5-c99a-4fe2-9433-e5e803f5ee0c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3249.514389] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3249.514389] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523c9ebe-8a51-8a4f-4991-0af38ebc76da" [ 3249.514389] env[61964]: _type = "Task" [ 3249.514389] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3249.521676] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523c9ebe-8a51-8a4f-4991-0af38ebc76da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3250.024964] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523c9ebe-8a51-8a4f-4991-0af38ebc76da, 'name': SearchDatastore_Task, 'duration_secs': 0.008625} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3250.025266] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460/ts-2024-11-29-16-10-08 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3250.025530] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ffb7777-5296-45c2-b0fe-684bf45f6e78 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3250.037079] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460/ts-2024-11-29-16-10-08 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3250.037219] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 78b0977c-33d1-40c7-836f-08f17ea59460 is no longer used by this node. Pending deletion! [ 3250.037380] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3250.037583] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/661f1d33-0209-46ab-998a-66f0a7b4bbb3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3250.037705] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/661f1d33-0209-46ab-998a-66f0a7b4bbb3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3250.038008] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/661f1d33-0209-46ab-998a-66f0a7b4bbb3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3250.038242] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d84ad82a-7312-4b3d-9f02-6d1f535b0308 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3250.042292] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3250.042292] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ec04d3-e055-5f77-fdf6-5a07ed61bcc9" [ 3250.042292] env[61964]: _type = "Task" [ 3250.042292] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3250.049679] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ec04d3-e055-5f77-fdf6-5a07ed61bcc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3250.552846] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52ec04d3-e055-5f77-fdf6-5a07ed61bcc9, 'name': SearchDatastore_Task, 'duration_secs': 0.008185} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3250.553315] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/661f1d33-0209-46ab-998a-66f0a7b4bbb3 is no longer used. Deleting! [ 3250.553315] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/661f1d33-0209-46ab-998a-66f0a7b4bbb3 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3250.553639] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a9ef079-f40f-4ec4-b825-951f66fc0905 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3250.559421] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3250.559421] env[61964]: value = "task-1688869" [ 3250.559421] env[61964]: _type = "Task" [ 3250.559421] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3250.566957] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3251.068148] env[61964]: DEBUG oslo_concurrency.lockutils [None req-0a24ee7d-1505-42af-b6ae-7fcd06a1f816 tempest-ServerShowV247Test-1790697772 tempest-ServerShowV247Test-1790697772-project-member] Acquiring lock "b4938cfb-bedf-4ecb-83fa-d172d6689414" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3251.071737] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101216} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3251.071974] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3251.072163] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/661f1d33-0209-46ab-998a-66f0a7b4bbb3" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3251.072378] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3251.072483] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3251.072840] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3251.073157] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a87eea6d-360b-4f7c-b8c1-b66864f132b7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3251.078436] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3251.078436] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a2f681-1c9c-94d7-d555-30599b38fb3a" [ 3251.078436] env[61964]: _type = "Task" [ 3251.078436] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3251.086866] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a2f681-1c9c-94d7-d555-30599b38fb3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3251.589216] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a2f681-1c9c-94d7-d555-30599b38fb3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009181} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3251.589540] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5 is no longer used. Deleting! 
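[editor's note] The SearchDatastore_Task and DeleteDatastoreFile_Task entries all follow the same rhythm: submit the task, poll its progress ("progress is 0%"), and report a duration on success ("duration_secs"). The sketch below captures that generic polling loop under the assumption of a poll_fn callback; it is an illustration of the pattern, not oslo.vmware's wait_for_task implementation.

    import time

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state."""

    def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
        """Poll `poll_fn` until the task succeeds, fails, or times out (sketch only).

        `poll_fn` returns a (state, progress) tuple with state in
        {'queued', 'running', 'success', 'error'}.  Returns the elapsed time,
        analogous to 'duration_secs' in the log above.
        """
        start = time.monotonic()
        while True:
            state, progress = poll_fn()
            if state == "success":
                return time.monotonic() - start
            if state == "error":
                raise TaskFailed("task reported an error")
            if time.monotonic() - start > timeout:
                raise TimeoutError("gave up waiting for task")
            # Same loop shape as the log: report progress, sleep, poll again.
            print("progress is %d%%" % progress)
            time.sleep(interval)

    # Example: a fake task that finishes on the third poll.
    states = iter([("running", 0), ("running", 50), ("success", 100)])
    print("duration_secs=%.3f" % wait_for_task(lambda: next(states), interval=0.01))

[end editor's note]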
[ 3251.589700] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3251.589977] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d50d0acc-2d24-484a-b9d1-bd676882b2df {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3251.595836] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3251.595836] env[61964]: value = "task-1688870" [ 3251.595836] env[61964]: _type = "Task" [ 3251.595836] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3251.603577] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3251.644024] env[61964]: DEBUG oslo_concurrency.lockutils [None req-14170ed3-640e-41c6-8f8c-974f046d760f tempest-ServersTestJSON-184050650 tempest-ServersTestJSON-184050650-project-member] Acquiring lock "44fed5d6-967a-43e6-bfcf-91b107c41c83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3252.105868] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102441} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3252.108029] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3252.108029] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3252.108029] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/36e4f8ae-3551-441a-b581-d89ca6f05b41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3252.108029] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/36e4f8ae-3551-441a-b581-d89ca6f05b41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3252.108029] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/36e4f8ae-3551-441a-b581-d89ca6f05b41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3252.108029] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3a2aedf-6ca0-4509-ba2f-27f76d89a81a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3252.111853] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3252.111853] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52601e35-55b3-c41b-33c3-7e320bb168a9" [ 3252.111853] env[61964]: _type = "Task" [ 3252.111853] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3252.119323] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52601e35-55b3-c41b-33c3-7e320bb168a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3252.622399] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52601e35-55b3-c41b-33c3-7e320bb168a9, 'name': SearchDatastore_Task, 'duration_secs': 0.009387} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3252.622751] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/36e4f8ae-3551-441a-b581-d89ca6f05b41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3252.622951] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3252.623086] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3252.623401] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3252.623660] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a46d9d3b-b53c-4461-b411-664804275c41 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3252.627800] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3252.627800] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be4f95-f67c-0927-12b9-9ab9c9dd7f2f" [ 3252.627800] env[61964]: _type = "Task" [ 3252.627800] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3252.634922] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be4f95-f67c-0927-12b9-9ab9c9dd7f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3253.139052] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52be4f95-f67c-0927-12b9-9ab9c9dd7f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.008882} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3253.139052] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3253.139052] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/9c4197bc-4731-4723-a639-cdac585b7efa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3253.139052] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/9c4197bc-4731-4723-a639-cdac585b7efa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3253.139361] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/9c4197bc-4731-4723-a639-cdac585b7efa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3253.139457] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb33163-b405-4adf-a651-ad302a1924e0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3253.143649] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3253.143649] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52617f06-5cec-3be0-e643-81a3fa4f5ff5" [ 3253.143649] env[61964]: _type = "Task" [ 3253.143649] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3253.152517] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52617f06-5cec-3be0-e643-81a3fa4f5ff5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3253.654231] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52617f06-5cec-3be0-e643-81a3fa4f5ff5, 'name': SearchDatastore_Task, 'duration_secs': 0.009017} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3253.654546] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/9c4197bc-4731-4723-a639-cdac585b7efa is no longer used. Deleting! 
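[editor's note] Each cache entry is inspected under its own lock (plus an "external semaphore") named after the datastore path, presumably so that cleanup of one cached image cannot race with other work on the same entry. Below is a standard-library stand-in for that per-entry locking; it is not oslo.concurrency.lockutils, and the helper name cache_entry_lock is hypothetical.

    import threading
    from contextlib import contextmanager

    # One lock per cached-image path, mirroring the per-entry
    # "[datastore1] devstack-image-cache_base/<image-id>" locks in the log.
    _locks = {}
    _locks_guard = threading.Lock()

    @contextmanager
    def cache_entry_lock(name):
        """Serialize all work on one cache entry identified by `name`."""
        with _locks_guard:
            lock = _locks.setdefault(name, threading.Lock())
        with lock:
            yield

    # Usage (hypothetical): run the search / mark / delete steps for one
    # entry while holding its lock, then release it before the next entry.
    # with cache_entry_lock("[datastore1] devstack-image-cache_base/<image-id>"):
    #     ...SearchDatastore_Task, then mark or delete...

[end editor's note]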
[ 3253.654689] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/9c4197bc-4731-4723-a639-cdac585b7efa {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3253.654941] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d8d151e-c2c5-4f96-9bb2-c441f1e8205b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3253.660741] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3253.660741] env[61964]: value = "task-1688871" [ 3253.660741] env[61964]: _type = "Task" [ 3253.660741] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3253.668519] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3254.170862] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104115} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3254.171041] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3254.171206] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/9c4197bc-4731-4723-a639-cdac585b7efa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3254.171423] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a4486a4a-af70-4732-903d-7c40bb14bff4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3254.171539] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/a4486a4a-af70-4732-903d-7c40bb14bff4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3254.171854] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4486a4a-af70-4732-903d-7c40bb14bff4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3254.172146] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be8e2247-9b4c-40a0-aa3e-88f548abc03c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3254.176201] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3254.176201] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5ee05-c0c8-ed56-abf2-c1cd07d2163e" [ 3254.176201] env[61964]: _type = "Task" [ 3254.176201] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3254.183414] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5ee05-c0c8-ed56-abf2-c1cd07d2163e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3254.686639] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e5ee05-c0c8-ed56-abf2-c1cd07d2163e, 'name': SearchDatastore_Task, 'duration_secs': 0.009233} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3254.686979] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/a4486a4a-af70-4732-903d-7c40bb14bff4 is no longer used. Deleting! [ 3254.687110] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a4486a4a-af70-4732-903d-7c40bb14bff4 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3254.687372] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d309ebc0-c878-4b54-860d-2662c162155a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3254.693207] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3254.693207] env[61964]: value = "task-1688872" [ 3254.693207] env[61964]: _type = "Task" [ 3254.693207] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3254.700789] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688872, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3255.203736] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097471} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3255.203953] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3255.204081] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/a4486a4a-af70-4732-903d-7c40bb14bff4" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3255.204294] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7a96b7b5-d4c9-4db6-bac2-607c11967049" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3255.204413] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/7a96b7b5-d4c9-4db6-bac2-607c11967049" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3255.204735] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7a96b7b5-d4c9-4db6-bac2-607c11967049" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3255.204997] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89092bc7-6091-46c0-bd6c-a394647a6600 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3255.209229] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3255.209229] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a34792-b6bd-d9f6-7eab-497c1027dbfa" [ 3255.209229] env[61964]: _type = "Task" [ 3255.209229] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3255.216641] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a34792-b6bd-d9f6-7eab-497c1027dbfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3255.719746] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a34792-b6bd-d9f6-7eab-497c1027dbfa, 'name': SearchDatastore_Task, 'duration_secs': 0.009096} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3255.720150] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/7a96b7b5-d4c9-4db6-bac2-607c11967049 is no longer used. Deleting! 
[ 3255.720279] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/7a96b7b5-d4c9-4db6-bac2-607c11967049 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3255.720494] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a036bef9-0f5d-4239-bfe0-615c11e12c2e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3255.726842] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3255.726842] env[61964]: value = "task-1688873" [ 3255.726842] env[61964]: _type = "Task" [ 3255.726842] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3255.734284] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688873, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3256.236381] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10064} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3256.236600] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3256.236771] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/7a96b7b5-d4c9-4db6-bac2-607c11967049" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3256.236993] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a2d1edbd-d33a-4332-892f-15b81eb6d3fa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3256.237128] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/a2d1edbd-d33a-4332-892f-15b81eb6d3fa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3256.237446] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a2d1edbd-d33a-4332-892f-15b81eb6d3fa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3256.237701] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95a192bd-37be-4c86-84dc-6d0a38abf76f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3256.241653] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3256.241653] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5221b3ea-c5bf-3b94-f968-d32bdafaa51b" [ 3256.241653] env[61964]: _type = "Task" [ 3256.241653] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3256.248798] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5221b3ea-c5bf-3b94-f968-d32bdafaa51b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3256.752597] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5221b3ea-c5bf-3b94-f968-d32bdafaa51b, 'name': SearchDatastore_Task, 'duration_secs': 0.008791} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3256.752955] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/a2d1edbd-d33a-4332-892f-15b81eb6d3fa is no longer used. Deleting! [ 3256.752992] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a2d1edbd-d33a-4332-892f-15b81eb6d3fa {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3256.753274] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51a03f22-f97d-4cce-a70b-4de87de46be7 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3256.758785] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3256.758785] env[61964]: value = "task-1688874" [ 3256.758785] env[61964]: _type = "Task" [ 3256.758785] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3256.766307] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688874, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3257.268787] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688874, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09629} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3257.268998] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3257.269190] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/a2d1edbd-d33a-4332-892f-15b81eb6d3fa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3257.269409] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4aa07396-e7fd-4086-bb6d-2bc1eb574a3a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3257.269527] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/4aa07396-e7fd-4086-bb6d-2bc1eb574a3a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3257.269930] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4aa07396-e7fd-4086-bb6d-2bc1eb574a3a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3257.270161] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3abdd95-71ad-46ec-a286-177afec2951c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3257.274611] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3257.274611] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52135df6-2bc7-f200-b8d2-18c50f2c97b0" [ 3257.274611] env[61964]: _type = "Task" [ 3257.274611] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3257.283769] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52135df6-2bc7-f200-b8d2-18c50f2c97b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3257.785084] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52135df6-2bc7-f200-b8d2-18c50f2c97b0, 'name': SearchDatastore_Task, 'duration_secs': 0.008674} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3257.785726] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/4aa07396-e7fd-4086-bb6d-2bc1eb574a3a is no longer used. Deleting! 
[ 3257.785726] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4aa07396-e7fd-4086-bb6d-2bc1eb574a3a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3257.785842] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6892d065-4bf6-4687-a1a8-3da73646c6ce {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3257.791831] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3257.791831] env[61964]: value = "task-1688875" [ 3257.791831] env[61964]: _type = "Task" [ 3257.791831] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3257.799232] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688875, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3258.301797] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104327} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3258.302034] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3258.302209] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/4aa07396-e7fd-4086-bb6d-2bc1eb574a3a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3258.302421] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1f91c02c-8d40-4706-82e1-22fd1cb5cf41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3258.302540] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1f91c02c-8d40-4706-82e1-22fd1cb5cf41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3258.302859] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1f91c02c-8d40-4706-82e1-22fd1cb5cf41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3258.303139] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-701d5fcd-7435-47eb-80b4-401f013b3b38 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3258.307535] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3258.307535] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d2fc7e-1c08-9f3c-70b7-5725dcbab252" [ 3258.307535] env[61964]: _type = "Task" [ 3258.307535] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3258.314823] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d2fc7e-1c08-9f3c-70b7-5725dcbab252, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3258.818209] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d2fc7e-1c08-9f3c-70b7-5725dcbab252, 'name': SearchDatastore_Task, 'duration_secs': 0.00918} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3258.818542] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/1f91c02c-8d40-4706-82e1-22fd1cb5cf41 is no longer used. Deleting! [ 3258.818673] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1f91c02c-8d40-4706-82e1-22fd1cb5cf41 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3258.818928] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68da5417-b53c-4d04-bf29-f93016578fb1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3258.824567] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3258.824567] env[61964]: value = "task-1688876" [ 3258.824567] env[61964]: _type = "Task" [ 3258.824567] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3258.832574] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688876, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3259.336542] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121143} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3259.336796] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3259.336934] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1f91c02c-8d40-4706-82e1-22fd1cb5cf41" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3259.337173] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f41fa6ca-4035-45bb-aacf-e97a1e1f02f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3259.337314] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/f41fa6ca-4035-45bb-aacf-e97a1e1f02f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3259.337619] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f41fa6ca-4035-45bb-aacf-e97a1e1f02f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3259.337890] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44f08a17-96c6-4481-b292-b087c4740d64 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3259.342406] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3259.342406] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5237162f-e7aa-1bb5-81ed-7334dcc87da3" [ 3259.342406] env[61964]: _type = "Task" [ 3259.342406] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3259.349860] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5237162f-e7aa-1bb5-81ed-7334dcc87da3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3259.854505] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5237162f-e7aa-1bb5-81ed-7334dcc87da3, 'name': SearchDatastore_Task, 'duration_secs': 0.008479} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3259.854863] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/f41fa6ca-4035-45bb-aacf-e97a1e1f02f5 is no longer used. Deleting! 
[ 3259.854863] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f41fa6ca-4035-45bb-aacf-e97a1e1f02f5 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3259.855026] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e2ebcdb-6200-4dc4-8b2b-1ee16826edba {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3259.864911] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3259.864911] env[61964]: value = "task-1688877" [ 3259.864911] env[61964]: _type = "Task" [ 3259.864911] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3259.877564] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688877, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3260.374332] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688877, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104766} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3260.374552] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3260.374724] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/f41fa6ca-4035-45bb-aacf-e97a1e1f02f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3260.374944] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/187d9e64-0b35-41b7-b3c9-b798b3b4dcc5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3260.375073] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/187d9e64-0b35-41b7-b3c9-b798b3b4dcc5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3260.375400] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/187d9e64-0b35-41b7-b3c9-b798b3b4dcc5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3260.375654] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99d27ea9-deb0-4976-810a-0536bb1ffe9c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3260.379845] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3260.379845] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52476cd5-ddab-cb7f-1c75-c8d777bb8223" [ 3260.379845] env[61964]: _type = "Task" [ 3260.379845] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3260.387052] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52476cd5-ddab-cb7f-1c75-c8d777bb8223, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3260.890314] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52476cd5-ddab-cb7f-1c75-c8d777bb8223, 'name': SearchDatastore_Task, 'duration_secs': 0.010627} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3260.890628] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/187d9e64-0b35-41b7-b3c9-b798b3b4dcc5 is no longer used. Deleting! [ 3260.890751] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/187d9e64-0b35-41b7-b3c9-b798b3b4dcc5 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3260.891011] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fc7eaf2-a03c-4bc2-9d04-5640e6d13535 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3260.896955] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3260.896955] env[61964]: value = "task-1688878" [ 3260.896955] env[61964]: _type = "Task" [ 3260.896955] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3260.903961] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3261.407157] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101672} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3261.407389] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3261.407561] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/187d9e64-0b35-41b7-b3c9-b798b3b4dcc5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3261.407777] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3261.407894] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3261.408230] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3261.408499] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-225f59c9-295f-45da-bd3e-47d44d923567 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3261.412858] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3261.412858] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f300d1-354b-41b9-0782-ef76d4cb5ebc" [ 3261.412858] env[61964]: _type = "Task" [ 3261.412858] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3261.421434] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f300d1-354b-41b9-0782-ef76d4cb5ebc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3261.927667] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f300d1-354b-41b9-0782-ef76d4cb5ebc, 'name': SearchDatastore_Task, 'duration_secs': 0.008046} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3261.928220] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8/ts-2024-11-29-16-10-20 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3261.928540] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d07812f-62a2-4dc7-91fd-4dd04679d9f6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3261.940542] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8/ts-2024-11-29-16-10-20 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3261.940710] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image ec92b9ad-c2d6-45d2-972c-3057b24506c8 is no longer used by this node. Pending deletion! [ 3261.940857] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3261.941082] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3261.941203] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3261.941519] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3261.941850] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269165c4-0c5b-47e6-aa4f-b443f20dee77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3261.946239] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3261.946239] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5280d84a-449b-c273-5dae-560477576f8b" [ 3261.946239] env[61964]: _type = "Task" [ 3261.946239] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3261.953547] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5280d84a-449b-c273-5dae-560477576f8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3262.457033] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5280d84a-449b-c273-5dae-560477576f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.008078} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3262.457320] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae is no longer used. Deleting! [ 3262.457536] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3262.457827] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4ee2601-d5c6-42ac-8708-7a463d94c818 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3262.463722] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3262.463722] env[61964]: value = "task-1688879" [ 3262.463722] env[61964]: _type = "Task" [ 3262.463722] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3262.470892] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688879, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3262.973457] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094162} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3262.973962] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3262.973962] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3262.974123] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3262.974249] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3262.974496] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3262.974765] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cf7f61e-61d8-4a1c-8b0a-05d334101008 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3262.979154] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3262.979154] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523a93eb-e62d-6ad4-af1d-9d4dafbec6d8" [ 3262.979154] env[61964]: _type = "Task" [ 3262.979154] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3262.986737] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523a93eb-e62d-6ad4-af1d-9d4dafbec6d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3263.490134] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]523a93eb-e62d-6ad4-af1d-9d4dafbec6d8, 'name': SearchDatastore_Task, 'duration_secs': 0.008791} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3263.490403] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c/ts-2024-11-29-16-10-22 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3263.490665] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98faf522-7a27-4198-a3a1-53edd23ad9ca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3263.501640] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c/ts-2024-11-29-16-10-22 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3263.501789] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image e9775c62-1e83-4c3b-aca1-5151afa0bb0c is no longer used by this node. Pending deletion! [ 3263.501961] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3263.502181] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a8fb39e3-2311-4098-8ef4-332e53218d35" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3263.502297] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/a8fb39e3-2311-4098-8ef4-332e53218d35" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3263.502599] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a8fb39e3-2311-4098-8ef4-332e53218d35" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3263.502861] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15f90118-e1ba-46d1-81ae-205eeffb117c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3263.506822] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3263.506822] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525886e2-1053-9bf0-b1e2-cb216ee4b22d" [ 3263.506822] env[61964]: _type = "Task" [ 3263.506822] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3263.514033] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525886e2-1053-9bf0-b1e2-cb216ee4b22d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3264.016777] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]525886e2-1053-9bf0-b1e2-cb216ee4b22d, 'name': SearchDatastore_Task, 'duration_secs': 0.008124} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3264.017142] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/a8fb39e3-2311-4098-8ef4-332e53218d35 is no longer used. Deleting! [ 3264.017252] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a8fb39e3-2311-4098-8ef4-332e53218d35 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3264.017489] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dd710f5-8262-4c45-86db-0e9ca89b6bc3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3264.023192] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3264.023192] env[61964]: value = "task-1688880" [ 3264.023192] env[61964]: _type = "Task" [ 3264.023192] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3264.030107] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688880, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3264.535536] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099633} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3264.535882] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3264.536158] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/a8fb39e3-2311-4098-8ef4-332e53218d35" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3264.536485] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3264.536683] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3264.537136] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3264.537505] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ac78f28-5c54-4481-ad4b-e9b32a7d2b4b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3264.542903] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3264.542903] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bc657-e81d-a43a-c100-22ca35885f84" [ 3264.542903] env[61964]: _type = "Task" [ 3264.542903] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3264.553328] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bc657-e81d-a43a-c100-22ca35885f84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3265.053480] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bc657-e81d-a43a-c100-22ca35885f84, 'name': SearchDatastore_Task, 'duration_secs': 0.008468} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3265.053788] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd/ts-2024-11-29-16-10-23 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3265.054020] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0755d40c-cde3-4f72-b827-eb1f6ff904ff {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3265.065038] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd/ts-2024-11-29-16-10-23 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3265.065190] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 1a16430b-f576-459c-ac09-bbd0b1c4e7fd is no longer used by this node. Pending deletion! [ 3265.065345] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3265.065551] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7c07614c-1f2d-4489-84aa-800cea4bc3da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3265.065664] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/7c07614c-1f2d-4489-84aa-800cea4bc3da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3265.065990] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7c07614c-1f2d-4489-84aa-800cea4bc3da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3265.066234] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03c5c735-b29c-4565-bcc5-d1f8c416a086 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3265.070125] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3265.070125] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d1c814-54cf-a08d-a3b1-81788f357d3b" [ 3265.070125] env[61964]: _type = "Task" [ 3265.070125] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3265.077741] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d1c814-54cf-a08d-a3b1-81788f357d3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3265.580733] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d1c814-54cf-a08d-a3b1-81788f357d3b, 'name': SearchDatastore_Task, 'duration_secs': 0.007647} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3265.581043] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/7c07614c-1f2d-4489-84aa-800cea4bc3da is no longer used. Deleting! [ 3265.581193] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/7c07614c-1f2d-4489-84aa-800cea4bc3da {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3265.581447] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d83d00ab-8698-4a3a-b8ce-434e9a704a3c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3265.587522] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3265.587522] env[61964]: value = "task-1688881" [ 3265.587522] env[61964]: _type = "Task" [ 3265.587522] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3265.596338] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3266.097837] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104042} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3266.098245] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3266.098245] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/7c07614c-1f2d-4489-84aa-800cea4bc3da" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3266.098465] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/89c53200-10f1-43f0-9c74-bcb898fe64b8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3266.098584] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/89c53200-10f1-43f0-9c74-bcb898fe64b8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3266.098893] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/89c53200-10f1-43f0-9c74-bcb898fe64b8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3266.099177] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c754c0e0-3c15-4123-86c3-82914e1c47ef {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3266.103570] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3266.103570] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52256695-52c1-db4e-8802-e77f3682de60" [ 3266.103570] env[61964]: _type = "Task" [ 3266.103570] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3266.110796] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52256695-52c1-db4e-8802-e77f3682de60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3266.614480] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52256695-52c1-db4e-8802-e77f3682de60, 'name': SearchDatastore_Task, 'duration_secs': 0.008622} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3266.614799] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/89c53200-10f1-43f0-9c74-bcb898fe64b8 is no longer used. Deleting! 
[ 3266.614944] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/89c53200-10f1-43f0-9c74-bcb898fe64b8 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3266.615233] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58fb68e0-f230-4b32-8f42-db4ce81d09b5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3266.622015] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3266.622015] env[61964]: value = "task-1688882" [ 3266.622015] env[61964]: _type = "Task" [ 3266.622015] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3266.629513] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688882, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3267.133393] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126083} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3267.133393] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3267.133393] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/89c53200-10f1-43f0-9c74-bcb898fe64b8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3267.133654] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6c421a47-fade-4d53-947b-17973bdf8147" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3267.133654] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/6c421a47-fade-4d53-947b-17973bdf8147" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3267.133893] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6c421a47-fade-4d53-947b-17973bdf8147" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3267.134145] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea3ea16c-83cf-49fe-9e34-45064a62bf9a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3267.138504] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3267.138504] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524188d9-1477-65f0-1db8-fe0d3e2b7c7b" [ 3267.138504] env[61964]: _type = "Task" [ 3267.138504] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3267.146539] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524188d9-1477-65f0-1db8-fe0d3e2b7c7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3267.649981] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524188d9-1477-65f0-1db8-fe0d3e2b7c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.00984} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3267.649981] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/6c421a47-fade-4d53-947b-17973bdf8147 is no longer used. Deleting! [ 3267.649981] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6c421a47-fade-4d53-947b-17973bdf8147 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3267.649981] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba5c5a9a-4a88-48f3-91cf-2e4c4584df94 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3267.655951] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3267.655951] env[61964]: value = "task-1688883" [ 3267.655951] env[61964]: _type = "Task" [ 3267.655951] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3267.663455] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688883, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3268.166190] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688883, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101451} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3268.166549] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3268.166549] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/6c421a47-fade-4d53-947b-17973bdf8147" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3268.166748] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3268.166868] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3268.167203] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3268.167473] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd1eea16-5fc5-44e7-950b-665ba57a4c2d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3268.171864] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3268.171864] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bd025-d950-1217-016e-643565a27b8a" [ 3268.171864] env[61964]: _type = "Task" [ 3268.171864] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3268.180131] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bd025-d950-1217-016e-643565a27b8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3268.682531] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]524bd025-d950-1217-016e-643565a27b8a, 'name': SearchDatastore_Task, 'duration_secs': 0.010238} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3268.682804] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d/ts-2024-11-29-16-10-27 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3268.683083] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ee31482-383c-4411-ad40-22c977699748 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3268.694525] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d/ts-2024-11-29-16-10-27 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3268.694659] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 606aa004-110a-4bdd-aa5b-6db84ddce29d is no longer used by this node. Pending deletion! [ 3268.694823] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3268.695048] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c1b1d8de-2ef6-4626-b2ce-92479813c7cc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3268.695170] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/c1b1d8de-2ef6-4626-b2ce-92479813c7cc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3268.695475] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c1b1d8de-2ef6-4626-b2ce-92479813c7cc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3268.695722] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cdc3eae-2ede-4568-a5ec-ae8e62b3c3dd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3268.699889] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3268.699889] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b2c2c3-bde0-d443-d771-be5cebd53009" [ 3268.699889] env[61964]: _type = "Task" [ 3268.699889] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3268.707807] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b2c2c3-bde0-d443-d771-be5cebd53009, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3269.212267] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b2c2c3-bde0-d443-d771-be5cebd53009, 'name': SearchDatastore_Task, 'duration_secs': 0.008373} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3269.212600] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/c1b1d8de-2ef6-4626-b2ce-92479813c7cc" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3269.212811] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3269.212932] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3269.213264] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3269.213537] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dceb309d-21a7-4044-bf46-82b18653cad3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3269.218183] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3269.218183] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5297d476-003b-5756-8a38-0c0350f88d2b" [ 3269.218183] env[61964]: _type = "Task" [ 3269.218183] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3269.225597] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5297d476-003b-5756-8a38-0c0350f88d2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3269.728710] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5297d476-003b-5756-8a38-0c0350f88d2b, 'name': SearchDatastore_Task, 'duration_secs': 0.009221} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3269.728976] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3269.729223] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3269.729346] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3269.729671] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3269.729978] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbfd0740-f720-4a1a-9435-33ac723878ee {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3269.734348] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3269.734348] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5264e977-d81a-d6b9-7fe8-3288651dd6c2" [ 3269.734348] env[61964]: _type = "Task" [ 3269.734348] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3269.742055] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5264e977-d81a-d6b9-7fe8-3288651dd6c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3270.245125] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5264e977-d81a-d6b9-7fe8-3288651dd6c2, 'name': SearchDatastore_Task, 'duration_secs': 0.007964} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3270.245405] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381/ts-2024-11-29-16-10-29 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3270.245645] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dde5162-8a05-4225-a072-c8c6778593df {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3270.258484] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381/ts-2024-11-29-16-10-29 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3270.258649] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 91b7886f-79eb-4874-81dc-03841a653381 is no longer used by this node. Pending deletion! [ 3270.258869] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3270.259048] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/07b9f99a-1c09-43bc-8ab7-3e61698d648a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3270.259168] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/07b9f99a-1c09-43bc-8ab7-3e61698d648a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3270.259479] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/07b9f99a-1c09-43bc-8ab7-3e61698d648a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3270.259760] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d5e6a78-2a62-4b23-9364-fc057566715a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3270.264128] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3270.264128] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52131b9b-35b4-3ffc-10ce-c68a72deed8a" [ 3270.264128] env[61964]: _type = "Task" [ 3270.264128] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3270.271643] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52131b9b-35b4-3ffc-10ce-c68a72deed8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3270.775361] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52131b9b-35b4-3ffc-10ce-c68a72deed8a, 'name': SearchDatastore_Task, 'duration_secs': 0.009194} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3270.775683] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/07b9f99a-1c09-43bc-8ab7-3e61698d648a is no longer used. Deleting! [ 3270.775837] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/07b9f99a-1c09-43bc-8ab7-3e61698d648a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3270.776107] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5cfa0f1f-9d8e-473c-9e09-8d57d9a7a34f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3270.782386] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3270.782386] env[61964]: value = "task-1688884" [ 3270.782386] env[61964]: _type = "Task" [ 3270.782386] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3270.790821] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3271.292033] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109065} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3271.292351] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3271.292417] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/07b9f99a-1c09-43bc-8ab7-3e61698d648a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3271.292608] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3271.292726] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3271.293080] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3271.293338] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3266048-aac6-43a6-b732-0c7dc22138cd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3271.297610] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3271.297610] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c78f16-1945-695c-9936-6ce212c5e389" [ 3271.297610] env[61964]: _type = "Task" [ 3271.297610] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3271.304699] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c78f16-1945-695c-9936-6ce212c5e389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3271.808375] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52c78f16-1945-695c-9936-6ce212c5e389, 'name': SearchDatastore_Task, 'duration_secs': 0.00828} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3271.808655] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a/ts-2024-11-29-16-10-30 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3271.808920] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce56d665-b450-4e84-a60c-197cfdf9cd03 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3271.820836] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a/ts-2024-11-29-16-10-30 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3271.820974] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 4c4220e0-43c3-4e73-8d51-f8db8a49271a is no longer used by this node. Pending deletion! [ 3271.821149] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3271.821360] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e26c9743-3bd3-4acf-9204-5c52195402ff" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3271.821479] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e26c9743-3bd3-4acf-9204-5c52195402ff" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3271.821850] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e26c9743-3bd3-4acf-9204-5c52195402ff" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3271.822071] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f5002c6-bd58-470c-80be-7890443c2e77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3271.826243] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3271.826243] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b53582-ea65-fd2a-d196-c1a722a71354" [ 3271.826243] env[61964]: _type = "Task" [ 3271.826243] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3271.833876] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b53582-ea65-fd2a-d196-c1a722a71354, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3272.337547] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b53582-ea65-fd2a-d196-c1a722a71354, 'name': SearchDatastore_Task, 'duration_secs': 0.008434} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3272.337836] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/e26c9743-3bd3-4acf-9204-5c52195402ff is no longer used. Deleting! [ 3272.337942] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e26c9743-3bd3-4acf-9204-5c52195402ff {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3272.338219] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-197b747d-406c-4a93-9784-c8ae2979cb50 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3272.344715] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3272.344715] env[61964]: value = "task-1688885" [ 3272.344715] env[61964]: _type = "Task" [ 3272.344715] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3272.351835] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688885, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3272.854531] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688885, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1117} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3272.854758] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3272.854925] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e26c9743-3bd3-4acf-9204-5c52195402ff" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3272.855176] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7acfe9ad-b917-4a13-9bc2-ac9b8341f875" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3272.855305] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/7acfe9ad-b917-4a13-9bc2-ac9b8341f875" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3272.855630] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7acfe9ad-b917-4a13-9bc2-ac9b8341f875" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3272.855891] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dd0c185-21fd-410c-8c76-c7a5fba23909 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3272.860077] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3272.860077] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52370cf6-5863-ddd7-21b4-38824cf22824" [ 3272.860077] env[61964]: _type = "Task" [ 3272.860077] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3272.868031] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52370cf6-5863-ddd7-21b4-38824cf22824, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3273.371510] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52370cf6-5863-ddd7-21b4-38824cf22824, 'name': SearchDatastore_Task, 'duration_secs': 0.008187} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3273.371833] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/7acfe9ad-b917-4a13-9bc2-ac9b8341f875/ts-2024-11-29-16-10-32 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3273.372038] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d4dd36b-8831-47df-a82c-1513827b9c45 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3273.384175] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/7acfe9ad-b917-4a13-9bc2-ac9b8341f875/ts-2024-11-29-16-10-32 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3273.384328] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 7acfe9ad-b917-4a13-9bc2-ac9b8341f875 is no longer used by this node. Pending deletion! [ 3273.384497] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/7acfe9ad-b917-4a13-9bc2-ac9b8341f875" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3273.384710] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3273.384838] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3273.385191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3273.385436] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84ebb647-69fd-4011-9260-7d3bf8da5106 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3273.389785] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3273.389785] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b9b55b-e6d1-7b92-a0c2-1d0b0678c07a" [ 3273.389785] env[61964]: _type = "Task" [ 3273.389785] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3273.397462] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b9b55b-e6d1-7b92-a0c2-1d0b0678c07a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3273.900549] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b9b55b-e6d1-7b92-a0c2-1d0b0678c07a, 'name': SearchDatastore_Task, 'duration_secs': 0.008114} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3273.900816] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325/ts-2024-11-29-16-10-32 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3273.901088] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0832e827-1525-474b-87ee-096bcb36fa2a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3273.913275] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325/ts-2024-11-29-16-10-32 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3273.913470] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image fe3ff768-4508-4f71-bdbf-cc790f67e325 is no longer used by this node. Pending deletion! [ 3273.913728] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3273.914035] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3273.914226] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3273.914617] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3273.914880] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76ab1319-6380-4ccd-8d29-f12b2e505d31 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3273.919177] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3273.919177] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521e69a6-8787-5480-66ca-5f15a392429d" [ 3273.919177] env[61964]: _type = "Task" [ 3273.919177] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3273.926549] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521e69a6-8787-5480-66ca-5f15a392429d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3274.429684] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]521e69a6-8787-5480-66ca-5f15a392429d, 'name': SearchDatastore_Task, 'duration_secs': 0.008193} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3274.429975] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c/ts-2024-11-29-16-10-33 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3274.430286] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9004565c-3867-4faf-a988-41ae88f62484 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3274.441623] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c/ts-2024-11-29-16-10-33 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3274.441772] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 38183c5c-65a6-44a9-a790-7887aab2a27c is no longer used by this node. Pending deletion! 
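The "Pending deletion!" entries above are the aging half of the cache cleanup: SearchDatastore_Task confirms the cached folder still exists, and because no instance on this node currently uses the image, the pass only drops an empty ts-<UTC timestamp> marker directory into it via FileManager.MakeDirectory; a later pass can then remove the folder once the marker has outlived the configured grace period (this is how Nova's image-cache aging generally works, though the option values are not visible in this log). A minimal sketch of how such a marker name can be produced and checked, illustrative only and not the Nova implementation; the 86400-second grace period is an assumed example value:

from datetime import datetime, timedelta

TS_PREFIX = 'ts-'
TS_FORMAT = '%Y-%m-%d-%H-%M-%S'            # matches ts-2024-11-29-16-10-33


def timestamp_folder_name(now=None):
    """Marker directory name recording when an image became unused."""
    now = now or datetime.utcnow()
    return TS_PREFIX + now.strftime(TS_FORMAT)


def marker_is_expired(marker_name, max_age_seconds, now=None):
    """True once a ts- marker is older than the grace period."""
    now = now or datetime.utcnow()
    marked_at = datetime.strptime(marker_name[len(TS_PREFIX):], TS_FORMAT)
    return now - marked_at > timedelta(seconds=max_age_seconds)


print(timestamp_folder_name())                 # e.g. ts-2024-11-29-16-10-33
print(marker_is_expired('ts-2024-11-29-16-10-33', 86400))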
[ 3274.441937] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3274.442164] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3274.442282] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3274.442582] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3274.442842] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1a9b978-bade-49d6-a65c-66304c8d1d77 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3274.447260] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3274.447260] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5219d30e-d001-09d8-7f83-996e03b8690d" [ 3274.447260] env[61964]: _type = "Task" [ 3274.447260] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3274.454442] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5219d30e-d001-09d8-7f83-996e03b8690d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3274.957304] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5219d30e-d001-09d8-7f83-996e03b8690d, 'name': SearchDatastore_Task, 'duration_secs': 0.007786} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3274.957576] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293/ts-2024-11-29-16-10-33 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3274.957837] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb057c5c-c02c-4248-ba20-6b35dbd14de3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3274.969191] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293/ts-2024-11-29-16-10-33 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3274.969331] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image e59c6db1-2b82-4035-a564-459cc2761293 is no longer used by this node. Pending deletion! [ 3274.969491] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3274.969713] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3274.969846] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3274.970164] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3274.970394] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1466b0e4-7961-4f81-97fd-1d35581af5c2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3274.974566] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3274.974566] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527dfafd-ce4f-7631-6768-382b5d45e33e" [ 3274.974566] env[61964]: _type = "Task" [ 3274.974566] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3274.981718] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527dfafd-ce4f-7631-6768-382b5d45e33e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3275.485335] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527dfafd-ce4f-7631-6768-382b5d45e33e, 'name': SearchDatastore_Task, 'duration_secs': 0.008274} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3275.485674] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3275.485908] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/34288a4c-b2c6-4752-b2cd-ad5886a0eb3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3275.486021] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/34288a4c-b2c6-4752-b2cd-ad5886a0eb3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3275.486327] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/34288a4c-b2c6-4752-b2cd-ad5886a0eb3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3275.486631] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d72970a5-7f20-47d7-8672-edcc8c362788 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3275.491013] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3275.491013] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5236d4e4-8c48-da25-fc8b-b6a9b362fb9c" [ 3275.491013] env[61964]: _type = "Task" [ 3275.491013] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3275.500086] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5236d4e4-8c48-da25-fc8b-b6a9b362fb9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3276.001619] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5236d4e4-8c48-da25-fc8b-b6a9b362fb9c, 'name': SearchDatastore_Task, 'duration_secs': 0.009495} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3276.001962] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/34288a4c-b2c6-4752-b2cd-ad5886a0eb3f is no longer used. Deleting! 
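The "no longer used. Deleting!" entries take the other branch: the image folder is removed right away (presumably because its marker has already aged out) with a FileManager.DeleteDatastoreFile_Task, and the pass blocks until vCenter reports the task finished, which is what the task-1688886 entries that follow show. A rough sketch of driving that call through oslo.vmware's public session API; the session construction, datacenter reference and datastore path are placeholders, and error handling is omitted:

# Illustrative sketch, not Nova's ds_util code: delete a cached image folder
# such as '[datastore1] devstack-image-cache_base/<image-id>'.
from oslo_vmware import api


def delete_cache_folder(session, datacenter_ref, ds_path):
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=ds_path,
                              datacenter=datacenter_ref)
    # Blocks until vCenter reports success, raising on a task error.
    session.wait_for_task(task)


# session = api.VMwareAPISession(host, username, password,
#                                api_retry_count=10, task_poll_interval=0.5)
# delete_cache_folder(session, dc_ref,
#                     '[datastore1] devstack-image-cache_base/<image-id>')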
[ 3276.002133] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/34288a4c-b2c6-4752-b2cd-ad5886a0eb3f {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3276.002411] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-594c2645-267c-44fd-9157-7a50a741c948 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3276.008467] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3276.008467] env[61964]: value = "task-1688886" [ 3276.008467] env[61964]: _type = "Task" [ 3276.008467] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3276.015920] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688886, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3276.522277] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688886, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110324} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3276.522617] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3276.522830] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/34288a4c-b2c6-4752-b2cd-ad5886a0eb3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3276.523241] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/0197ef80-a03c-499f-9fdb-bc81384ad764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3276.523451] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/0197ef80-a03c-499f-9fdb-bc81384ad764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3276.523875] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/0197ef80-a03c-499f-9fdb-bc81384ad764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3276.524259] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d21a24a4-26b6-4318-ac5e-75f367e8c239 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3276.529947] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3276.529947] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a05de5-7e3f-28f9-411e-e6e0446a89db" [ 3276.529947] env[61964]: _type = "Task" [ 3276.529947] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3276.542199] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a05de5-7e3f-28f9-411e-e6e0446a89db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3277.047449] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52a05de5-7e3f-28f9-411e-e6e0446a89db, 'name': SearchDatastore_Task, 'duration_secs': 0.009212} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3277.047775] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/0197ef80-a03c-499f-9fdb-bc81384ad764 is no longer used. Deleting! [ 3277.047924] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/0197ef80-a03c-499f-9fdb-bc81384ad764 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3277.048266] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0dfa448e-ef1a-4fcb-a80e-17fadc7e2338 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3277.056283] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3277.056283] env[61964]: value = "task-1688887" [ 3277.056283] env[61964]: _type = "Task" [ 3277.056283] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3277.070617] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3277.566518] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111157} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3277.566774] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3277.566821] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/0197ef80-a03c-499f-9fdb-bc81384ad764" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3277.568061] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e07500c1-d76b-42c4-ba45-e66c427bd218" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3277.568061] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e07500c1-d76b-42c4-ba45-e66c427bd218" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3277.568061] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07500c1-d76b-42c4-ba45-e66c427bd218" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3277.568061] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77811018-23b8-4ecc-b88a-e1f98d95354a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3277.572037] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3277.572037] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52378df0-0776-8532-0347-3d253d94f45e" [ 3277.572037] env[61964]: _type = "Task" [ 3277.572037] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3277.579421] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52378df0-0776-8532-0347-3d253d94f45e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3278.082445] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52378df0-0776-8532-0347-3d253d94f45e, 'name': SearchDatastore_Task, 'duration_secs': 0.009633} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3278.082813] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/e07500c1-d76b-42c4-ba45-e66c427bd218 is no longer used. Deleting! 
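Each "Waiting for the task / progress is 0% / completed successfully" triplet above is the same polling pattern: the caller gets a Task managed-object reference back and re-reads its info until the state flips to success or error. A generic sketch of that loop, independent of oslo.vmware internals; read_task_info is a hypothetical stand-in for the property read, and the 0.5-second interval matches the roughly half-second gap between poll entries in this log:

import time


class TaskFailed(Exception):
    pass


def wait_for_task(read_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it succeeds; raise if it errors."""
    while True:
        info = read_task_info()        # expected keys: state, progress, error
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error'))
        # 'queued' or 'running': report progress and try again shortly.
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(poll_interval)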
[ 3278.083015] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e07500c1-d76b-42c4-ba45-e66c427bd218 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3278.083323] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-416549ca-0e12-4dd7-8f0b-5f620126b882 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3278.090170] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3278.090170] env[61964]: value = "task-1688888" [ 3278.090170] env[61964]: _type = "Task" [ 3278.090170] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3278.097962] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688888, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3278.601149] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114578} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3278.601461] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3278.601539] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e07500c1-d76b-42c4-ba45-e66c427bd218" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3278.601755] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3278.601897] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3278.602244] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3278.602509] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1afbac0-05cd-42c5-818c-b86b2dc4ede8 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3278.606965] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3278.606965] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5208ba51-c0d0-fb91-5b76-ff88d6805f85" [ 3278.606965] env[61964]: _type = "Task" [ 3278.606965] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3278.614544] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5208ba51-c0d0-fb91-5b76-ff88d6805f85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3279.117272] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5208ba51-c0d0-fb91-5b76-ff88d6805f85, 'name': SearchDatastore_Task, 'duration_secs': 0.008337} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3279.117565] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95/ts-2024-11-29-16-10-37 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3279.117838] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c046908-2e2d-4a0f-8c38-2bd2272ae73f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3279.132225] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95/ts-2024-11-29-16-10-37 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3279.132225] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 4e5561a3-16c4-4011-9364-3201c12a7f95 is no longer used by this node. Pending deletion! 
[ 3279.132225] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3279.132225] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3279.132225] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3279.132225] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3279.132225] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18cf36c2-e617-49b3-b88c-70dded21d2cc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3279.137264] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3279.137264] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529588fc-3fe2-260a-6a7d-024e16cc28ce" [ 3279.137264] env[61964]: _type = "Task" [ 3279.137264] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3279.143998] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529588fc-3fe2-260a-6a7d-024e16cc28ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3279.647506] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529588fc-3fe2-260a-6a7d-024e16cc28ce, 'name': SearchDatastore_Task, 'duration_secs': 0.008575} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3279.647902] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3279.648019] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/699f2226-050e-4042-a4d2-2dec5bc1af74" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3279.648145] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/699f2226-050e-4042-a4d2-2dec5bc1af74" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3279.648496] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/699f2226-050e-4042-a4d2-2dec5bc1af74" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3279.648763] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-685e0b8b-b5a9-4785-a488-57b417311c5f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3279.652952] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3279.652952] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d4f831-da21-737f-02dc-ab145eb44d20" [ 3279.652952] env[61964]: _type = "Task" [ 3279.652952] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3279.660246] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d4f831-da21-737f-02dc-ab145eb44d20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3280.163203] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d4f831-da21-737f-02dc-ab145eb44d20, 'name': SearchDatastore_Task, 'duration_secs': 0.009122} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3280.163438] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/699f2226-050e-4042-a4d2-2dec5bc1af74 is no longer used. Deleting! 
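All of the paths logged here use vSphere's datastore notation, "[<datastore name>] <path relative to the datastore root>", for example "[datastore1] devstack-image-cache_base/<image id>". A small standalone helper for splitting that notation apart, written only for illustration (Nova and oslo.vmware carry their own datastore-path objects for this):

# Illustrative helper, not a library API: split vSphere datastore notation.
import re

_DS_PATH_RE = re.compile(r'^\[(?P<datastore>[^\]]+)\] ?(?P<path>.*)$')


def split_datastore_path(ds_path):
    """Return (datastore_name, relative_path) for a '[ds] path' string."""
    match = _DS_PATH_RE.match(ds_path)
    if not match:
        raise ValueError('not a datastore path: %r' % ds_path)
    return match.group('datastore'), match.group('path').strip()


# split_datastore_path('[datastore1] devstack-image-cache_base/<image-id>')
# -> ('datastore1', 'devstack-image-cache_base/<image-id>')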
[ 3280.163591] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/699f2226-050e-4042-a4d2-2dec5bc1af74 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3280.163851] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fa6c39c-a459-422f-a9dd-cf7546123106 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3280.169509] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3280.169509] env[61964]: value = "task-1688889" [ 3280.169509] env[61964]: _type = "Task" [ 3280.169509] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3280.177036] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688889, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3280.679559] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104757} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3280.679873] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3280.679969] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/699f2226-050e-4042-a4d2-2dec5bc1af74" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3280.680192] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e069aefe-ac4d-40f1-95e6-72ffd35a3f55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3280.680308] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e069aefe-ac4d-40f1-95e6-72ffd35a3f55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3280.680628] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e069aefe-ac4d-40f1-95e6-72ffd35a3f55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3280.680886] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce664949-8b5e-4607-ad47-32425d244a5c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3280.685081] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3280.685081] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52104d48-d6b3-6329-df8e-85fe6694bc36" [ 3280.685081] env[61964]: _type = "Task" [ 3280.685081] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3280.692968] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52104d48-d6b3-6329-df8e-85fe6694bc36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3281.196540] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52104d48-d6b3-6329-df8e-85fe6694bc36, 'name': SearchDatastore_Task, 'duration_secs': 0.008538} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3281.196844] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/e069aefe-ac4d-40f1-95e6-72ffd35a3f55 is no longer used. Deleting! [ 3281.196987] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e069aefe-ac4d-40f1-95e6-72ffd35a3f55 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3281.197251] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef1f6b44-338d-420d-83f6-15d518a3e7ac {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3281.203598] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3281.203598] env[61964]: value = "task-1688890" [ 3281.203598] env[61964]: _type = "Task" [ 3281.203598] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3281.210861] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688890, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3281.713430] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10658} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3281.713782] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3281.713820] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e069aefe-ac4d-40f1-95e6-72ffd35a3f55" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3281.714037] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/96b73393-fc9f-4a7a-9847-6bb071a57406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3281.714165] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/96b73393-fc9f-4a7a-9847-6bb071a57406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3281.714487] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/96b73393-fc9f-4a7a-9847-6bb071a57406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3281.714754] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d523888f-e4e6-4063-853d-86d7450cb65e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3281.719599] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3281.719599] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52cc0066-5058-ef1f-0bfc-8ce15bb175c8" [ 3281.719599] env[61964]: _type = "Task" [ 3281.719599] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3281.728369] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52cc0066-5058-ef1f-0bfc-8ce15bb175c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3282.230704] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52cc0066-5058-ef1f-0bfc-8ce15bb175c8, 'name': SearchDatastore_Task, 'duration_secs': 0.008147} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3282.230965] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/96b73393-fc9f-4a7a-9847-6bb071a57406/ts-2024-11-29-16-10-41 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3282.231264] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17820bc8-2c38-4bea-966b-05ecc0a231d1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3282.242664] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/96b73393-fc9f-4a7a-9847-6bb071a57406/ts-2024-11-29-16-10-41 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3282.242802] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 96b73393-fc9f-4a7a-9847-6bb071a57406 is no longer used by this node. Pending deletion! [ 3282.242988] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/96b73393-fc9f-4a7a-9847-6bb071a57406" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3282.243218] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3282.243336] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3282.243651] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3282.243892] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb4f6f1f-1eb6-45e2-9222-32531eb45332 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3282.248221] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3282.248221] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52adcfbe-93d7-32ee-656b-f0011cd9b248" [ 3282.248221] env[61964]: _type = "Task" [ 3282.248221] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3282.255768] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52adcfbe-93d7-32ee-656b-f0011cd9b248, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3282.758478] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52adcfbe-93d7-32ee-656b-f0011cd9b248, 'name': SearchDatastore_Task, 'duration_secs': 0.007653} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3282.758902] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6/ts-2024-11-29-16-10-41 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3282.759031] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f170c890-b5e1-4d6c-8f8d-df97cc0610d5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3282.771132] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6/ts-2024-11-29-16-10-41 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3282.771287] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 109daf4b-e7cc-432a-a8f5-9998849286e6 is no longer used by this node. Pending deletion! [ 3282.771447] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3282.771665] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/df254227-4e61-4c7a-a973-061dce81aefb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3282.771780] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/df254227-4e61-4c7a-a973-061dce81aefb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3282.772152] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/df254227-4e61-4c7a-a973-061dce81aefb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3282.772391] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b7e5075-61e4-41da-b834-addc86e34462 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3282.776507] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3282.776507] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e7dadd-2a9f-298b-d9fc-43e5ac5a054c" [ 3282.776507] env[61964]: _type = "Task" [ 3282.776507] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3282.784084] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e7dadd-2a9f-298b-d9fc-43e5ac5a054c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3283.287241] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e7dadd-2a9f-298b-d9fc-43e5ac5a054c, 'name': SearchDatastore_Task, 'duration_secs': 0.008804} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3283.287570] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/df254227-4e61-4c7a-a973-061dce81aefb is no longer used. Deleting! [ 3283.287712] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/df254227-4e61-4c7a-a973-061dce81aefb {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3283.287972] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a904e187-7c1b-4441-a56f-3cdb1f9ba967 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3283.294491] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3283.294491] env[61964]: value = "task-1688891" [ 3283.294491] env[61964]: _type = "Task" [ 3283.294491] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3283.301681] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688891, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3283.804340] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100255} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3283.804695] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3283.804732] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/df254227-4e61-4c7a-a973-061dce81aefb" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3283.804943] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3283.805075] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3283.805399] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3283.805668] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81b35d44-d631-461b-ad53-91b2f2fbdcce {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3283.809780] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3283.809780] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5252cf33-04c5-332f-fd2b-13a129f7be29" [ 3283.809780] env[61964]: _type = "Task" [ 3283.809780] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3283.817034] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5252cf33-04c5-332f-fd2b-13a129f7be29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3284.320120] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5252cf33-04c5-332f-fd2b-13a129f7be29, 'name': SearchDatastore_Task, 'duration_secs': 0.009191} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3284.320455] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3284.320683] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/90284609-f2c9-4a94-b94c-8a3f383c3732" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3284.320804] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/90284609-f2c9-4a94-b94c-8a3f383c3732" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3284.321131] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/90284609-f2c9-4a94-b94c-8a3f383c3732" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3284.321401] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57f0a718-b7db-43d9-ba8b-0cd52b113a21 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3284.325333] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3284.325333] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5204033e-5bbc-f239-4bd4-a761b72ade6b" [ 3284.325333] env[61964]: _type = "Task" [ 3284.325333] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3284.332432] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5204033e-5bbc-f239-4bd4-a761b72ade6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3284.836917] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5204033e-5bbc-f239-4bd4-a761b72ade6b, 'name': SearchDatastore_Task, 'duration_secs': 0.00858} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3284.837308] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/90284609-f2c9-4a94-b94c-8a3f383c3732 is no longer used. Deleting! 
[ 3284.837373] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/90284609-f2c9-4a94-b94c-8a3f383c3732 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3284.837630] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88363e23-efdd-4fc8-aeb8-ba7bc0a6c755 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3284.843783] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3284.843783] env[61964]: value = "task-1688892" [ 3284.843783] env[61964]: _type = "Task" [ 3284.843783] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3284.851417] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3285.353826] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105685} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3285.354051] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3285.354256] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/90284609-f2c9-4a94-b94c-8a3f383c3732" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3285.354487] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6db713c7-788e-4550-badd-2a12f93208d8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3285.354606] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/6db713c7-788e-4550-badd-2a12f93208d8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3285.354918] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6db713c7-788e-4550-badd-2a12f93208d8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3285.355196] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d5686cf-fbca-4c03-832e-1aa98063372a {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3285.359336] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3285.359336] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52977981-cf5d-4792-c843-8358cffa5771" [ 3285.359336] env[61964]: _type = "Task" [ 3285.359336] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3285.366692] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52977981-cf5d-4792-c843-8358cffa5771, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3285.870049] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52977981-cf5d-4792-c843-8358cffa5771, 'name': SearchDatastore_Task, 'duration_secs': 0.008724} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3285.870404] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/6db713c7-788e-4550-badd-2a12f93208d8 is no longer used. Deleting! [ 3285.870499] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6db713c7-788e-4550-badd-2a12f93208d8 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3285.870760] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-374e62a3-2f7a-46ba-bf35-d191c929a591 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3285.876854] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3285.876854] env[61964]: value = "task-1688893" [ 3285.876854] env[61964]: _type = "Task" [ 3285.876854] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3285.883848] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688893, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3286.177893] env[61964]: WARNING oslo_vmware.rw_handles [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles response.begin() [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3286.177893] env[61964]: ERROR oslo_vmware.rw_handles [ 3286.178461] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Downloaded image file data d9802c76-d112-4072-8a46-ca03ed36e004 to vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3286.180519] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Caching image {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3286.180769] env[61964]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Copying Virtual Disk [datastore1] vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk to [datastore1] vmware_temp/54bb3007-d1f5-4471-b2bd-f7fb59f47976/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk {{(pid=61964) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3286.181073] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-569c2165-7f14-479a-ae91-a201433f4f06 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.188431] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 
tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 3286.188431] env[61964]: value = "task-1688894" [ 3286.188431] env[61964]: _type = "Task" [ 3286.188431] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3286.196381] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': task-1688894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3286.385866] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103033} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3286.386102] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3286.386283] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/6db713c7-788e-4550-badd-2a12f93208d8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3286.386500] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3cc096b8-f1ac-4488-8051-311bbdc6055d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3286.386622] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/3cc096b8-f1ac-4488-8051-311bbdc6055d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3286.386956] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3cc096b8-f1ac-4488-8051-311bbdc6055d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3286.387219] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6965b96d-f515-4721-8206-db4e3126802f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.391246] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3286.391246] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b79f17-f84c-f7eb-a153-cf9c1c604324" [ 3286.391246] env[61964]: _type = "Task" [ 3286.391246] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3286.398436] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b79f17-f84c-f7eb-a153-cf9c1c604324, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3286.699134] env[61964]: DEBUG oslo_vmware.exceptions [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Fault InvalidArgument not matched. {{(pid=61964) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3286.699435] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3286.699999] env[61964]: ERROR nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3286.699999] env[61964]: Faults: ['InvalidArgument'] [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] Traceback (most recent call last): [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] yield resources [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self.driver.spawn(context, instance, image_meta, [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self._fetch_image_if_missing(context, vi) [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] image_cache(vi, tmp_image_ds_loc) [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] vm_util.copy_virtual_disk( [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] session._wait_for_task(vmdk_copy_task) [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] return self.wait_for_task(task_ref) [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] return evt.wait() [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] result = hub.switch() [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] return self.greenlet.switch() [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self.f(*self.args, **self.kw) [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] raise exceptions.translate_fault(task_info.error) [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] Faults: ['InvalidArgument'] [ 3286.699999] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] [ 3286.701181] env[61964]: INFO nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Terminating instance [ 3286.701839] env[61964]: DEBUG oslo_concurrency.lockutils [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d9802c76-d112-4072-8a46-ca03ed36e004/d9802c76-d112-4072-8a46-ca03ed36e004.vmdk" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3286.702088] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 
tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3286.702335] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3153fe39-3703-481c-94c9-7d8b0dc4b74e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.704485] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3286.704672] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3286.705392] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a6a075-fff9-48a8-854d-969689d8f974 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.712181] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Unregistering the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3286.712388] env[61964]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e496b41-0855-4f50-a2f3-179c5c64084b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.714461] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3286.714634] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61964) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3286.715553] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87f9a3ea-1cc1-4dd3-83b2-17ee0aa8e405 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.719986] env[61964]: DEBUG oslo_vmware.api [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Waiting for the task: (returnval){ [ 3286.719986] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52434412-c557-5f60-3be2-6b147b591327" [ 3286.719986] env[61964]: _type = "Task" [ 3286.719986] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3286.727786] env[61964]: DEBUG oslo_vmware.api [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52434412-c557-5f60-3be2-6b147b591327, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3286.784981] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Unregistered the VM {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3286.785222] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Deleting contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3286.785403] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Deleting the datastore file [datastore1] a50141eb-d189-4970-9adc-10a25409b99a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3286.785659] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5da9e026-fbbb-4312-a9ee-efa795b071d6 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.791652] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for the task: (returnval){ [ 3286.791652] env[61964]: value = "task-1688896" [ 3286.791652] env[61964]: _type = "Task" [ 3286.791652] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3286.798989] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': task-1688896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3286.901306] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52b79f17-f84c-f7eb-a153-cf9c1c604324, 'name': SearchDatastore_Task, 'duration_secs': 0.008314} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3286.901642] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/3cc096b8-f1ac-4488-8051-311bbdc6055d is no longer used. Deleting! 
[ 3286.901727] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3cc096b8-f1ac-4488-8051-311bbdc6055d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3286.901965] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df4f5fd9-bc24-4908-ac79-bb2e306e245d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3286.907364] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3286.907364] env[61964]: value = "task-1688897" [ 3286.907364] env[61964]: _type = "Task" [ 3286.907364] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3286.914923] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688897, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3287.232055] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Preparing fetch location {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3287.232055] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating directory with path [datastore1] vmware_temp/5147c234-3786-4f24-8b0b-4c74d249998e/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3287.232055] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f920c05-9bca-4982-8e67-74f3e3dcb63b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.242548] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Created directory with path [datastore1] vmware_temp/5147c234-3786-4f24-8b0b-4c74d249998e/d9802c76-d112-4072-8a46-ca03ed36e004 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3287.242726] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Fetch image to [datastore1] vmware_temp/5147c234-3786-4f24-8b0b-4c74d249998e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk {{(pid=61964) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3287.242899] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to [datastore1] 
vmware_temp/5147c234-3786-4f24-8b0b-4c74d249998e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk on the data store datastore1 {{(pid=61964) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3287.243595] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d781d5f0-6d0c-42f6-a8a2-1d9b36b41294 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.249811] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51db4be8-262b-4e1a-ace7-3cb79ab45c4c {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.258902] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341d0e8a-86a8-48bd-9364-a3b0371a5e29 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.288485] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17041b45-190c-42d2-8d65-054e38cdd866 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.296491] env[61964]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-69b53f5f-2c08-45c5-82d5-0114da99ee7e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.300599] env[61964]: DEBUG oslo_vmware.api [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Task: {'id': task-1688896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069669} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3287.301178] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3287.301418] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Deleted contents of the VM from datastore datastore1 {{(pid=61964) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3287.301606] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3287.301778] env[61964]: INFO nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 3287.303863] env[61964]: DEBUG nova.compute.claims [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Aborting claim: {{(pid=61964) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3287.304062] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3287.304286] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3287.319934] env[61964]: DEBUG nova.virt.vmwareapi.images [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] [instance: 23ae2618-093b-49a7-b3e4-3d8038e08cc1] Downloading image file data d9802c76-d112-4072-8a46-ca03ed36e004 to the data store datastore1 {{(pid=61964) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3287.371359] env[61964]: DEBUG oslo_vmware.rw_handles [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5147c234-3786-4f24-8b0b-4c74d249998e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3287.434460] env[61964]: DEBUG oslo_vmware.rw_handles [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Completed reading data from the image iterator. {{(pid=61964) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3287.434653] env[61964]: DEBUG oslo_vmware.rw_handles [None req-33f03ad4-a9cd-46ed-a933-ba51c6138f4f tempest-ServerDiskConfigTestJSON-1136128862 tempest-ServerDiskConfigTestJSON-1136128862-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5147c234-3786-4f24-8b0b-4c74d249998e/d9802c76-d112-4072-8a46-ca03ed36e004/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61964) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3287.439253] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688897, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101643} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3287.439472] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3287.439638] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/3cc096b8-f1ac-4488-8051-311bbdc6055d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3287.439850] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/0a39ea25-fe66-4db2-9c78-96208a9e2380" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3287.440032] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/0a39ea25-fe66-4db2-9c78-96208a9e2380" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3287.440303] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/0a39ea25-fe66-4db2-9c78-96208a9e2380" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3287.440561] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4556b04-0430-44ed-8b67-7a915a005f5b {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.446125] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3287.446125] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522b21ef-f5b0-0462-127d-9a2f2c148988" [ 3287.446125] env[61964]: _type = "Task" [ 3287.446125] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3287.453639] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522b21ef-f5b0-0462-127d-9a2f2c148988, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3287.476791] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2193d7-d1a4-4e2c-8e49-ba5c9ccf90de {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.483450] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd94424e-034f-4e6f-8656-1ffdea162b07 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.513012] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7a1cae-6021-4360-a62b-49a4a9bb48ed {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.519620] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd8faf5-2ef0-4c55-903b-1d5ef7f253b9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.532120] env[61964]: DEBUG nova.compute.provider_tree [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3287.565219] env[61964]: DEBUG nova.scheduler.client.report [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Updated inventory for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 3287.565474] env[61964]: DEBUG nova.compute.provider_tree [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Updating resource provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 generation from 104 to 105 during operation: update_inventory {{(pid=61964) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 3287.565650] env[61964]: DEBUG nova.compute.provider_tree [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Updating inventory in ProviderTree for provider c64b88bc-0cc7-41f7-af90-1e96b384d8a5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61964) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3287.578846] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.274s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3287.579393] env[61964]: ERROR nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3287.579393] env[61964]: Faults: ['InvalidArgument'] [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] Traceback (most recent call last): [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self.driver.spawn(context, instance, image_meta, [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self._fetch_image_if_missing(context, vi) [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] image_cache(vi, tmp_image_ds_loc) [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] vm_util.copy_virtual_disk( [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] session._wait_for_task(vmdk_copy_task) [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] return self.wait_for_task(task_ref) [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] return evt.wait() [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] result = hub.switch() [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] return self.greenlet.switch() [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] self.f(*self.args, **self.kw) [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] raise exceptions.translate_fault(task_info.error) [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] Faults: ['InvalidArgument'] [ 3287.579393] env[61964]: ERROR nova.compute.manager [instance: a50141eb-d189-4970-9adc-10a25409b99a] [ 3287.580404] env[61964]: DEBUG nova.compute.utils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] VimFaultException {{(pid=61964) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3287.581474] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Build of instance a50141eb-d189-4970-9adc-10a25409b99a was re-scheduled: A specified parameter was not correct: fileType [ 3287.581474] env[61964]: Faults: ['InvalidArgument'] {{(pid=61964) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 3287.581847] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Unplugging VIFs for instance {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 3287.582072] env[61964]: 
DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61964) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 3287.582255] env[61964]: DEBUG nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3287.582420] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3287.887809] env[61964]: DEBUG nova.network.neutron [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3287.898845] env[61964]: INFO nova.compute.manager [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Took 0.32 seconds to deallocate network for instance. [ 3287.957254] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522b21ef-f5b0-0462-127d-9a2f2c148988, 'name': SearchDatastore_Task, 'duration_secs': 0.008921} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3287.957566] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/0a39ea25-fe66-4db2-9c78-96208a9e2380 is no longer used. Deleting! [ 3287.957702] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/0a39ea25-fe66-4db2-9c78-96208a9e2380 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3287.957968] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a35cfde4-a5f1-4315-8096-a0eacff2b92e {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3287.964722] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3287.964722] env[61964]: value = "task-1688898" [ 3287.964722] env[61964]: _type = "Task" [ 3287.964722] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3287.976667] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688898, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3287.991404] env[61964]: INFO nova.scheduler.client.report [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Deleted allocations for instance a50141eb-d189-4970-9adc-10a25409b99a [ 3288.012440] env[61964]: DEBUG oslo_concurrency.lockutils [None req-1acccca6-9913-4ab4-8b5c-3589b058a887 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "a50141eb-d189-4970-9adc-10a25409b99a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 403.487s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3288.013037] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "a50141eb-d189-4970-9adc-10a25409b99a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 206.991s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3288.013037] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Acquiring lock "a50141eb-d189-4970-9adc-10a25409b99a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 3288.013215] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "a50141eb-d189-4970-9adc-10a25409b99a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 3288.013283] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "a50141eb-d189-4970-9adc-10a25409b99a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3288.015517] env[61964]: INFO nova.compute.manager [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Terminating instance [ 3288.017157] env[61964]: DEBUG nova.compute.manager [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] 
[instance: a50141eb-d189-4970-9adc-10a25409b99a] Start destroying the instance on the hypervisor. {{(pid=61964) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 3288.017347] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Destroying instance {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3288.017812] env[61964]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91fe6d68-bf13-45ae-8a41-4b737cb6b8f2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3288.026204] env[61964]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04450c05-d61e-4e01-ba17-bce99c685f14 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3288.053357] env[61964]: WARNING nova.virt.vmwareapi.vmops [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a50141eb-d189-4970-9adc-10a25409b99a could not be found. [ 3288.053565] env[61964]: DEBUG nova.virt.vmwareapi.vmops [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Instance destroyed {{(pid=61964) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3288.053741] env[61964]: INFO nova.compute.manager [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3288.053984] env[61964]: DEBUG oslo.service.loopingcall [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61964) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3288.054219] env[61964]: DEBUG nova.compute.manager [-] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Deallocating network for instance {{(pid=61964) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 3288.054311] env[61964]: DEBUG nova.network.neutron [-] [instance: a50141eb-d189-4970-9adc-10a25409b99a] deallocate_for_instance() {{(pid=61964) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3288.079041] env[61964]: DEBUG nova.network.neutron [-] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Updating instance_info_cache with network_info: [] {{(pid=61964) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3288.088831] env[61964]: INFO nova.compute.manager [-] [instance: a50141eb-d189-4970-9adc-10a25409b99a] Took 0.03 seconds to deallocate network for instance. 
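The traceback above shows where the "InvalidArgument: fileType" fault actually surfaces: the vmdk copy task fails on the vCenter side, and oslo_vmware's _poll_task turns the task's error into a VimFaultException (api.py line 448, raise exceptions.translate_fault(task_info.error)), which unwinds through wait_for_task and vmops.spawn until the compute manager aborts the resource claim and re-schedules instance a50141eb. The following is only a rough, self-contained sketch of that poll-and-translate shape; none of the names below are the real oslo_vmware API (TaskInfo, get_task_info and the exception class are placeholders for illustration).

    # Illustrative poll-and-translate loop, mirroring the traceback above.
    # All names here are placeholders, not the real oslo_vmware API.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str                 # 'running' | 'success' | 'error'
        error_message: str = ""
        faults: tuple = ()

    class VimFaultException(Exception):
        def __init__(self, faults, message):
            super().__init__(message)
            self.faults = list(faults)

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it finishes; raise on failure."""
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # Corresponds to "raise exceptions.translate_fault(task_info.error)"
                raise VimFaultException(info.faults, info.error_message)
            time.sleep(poll_interval)

In the log the copy task ends in the error state with fault 'InvalidArgument', so the exception raised from the polling loop carries exactly the "A specified parameter was not correct: fileType" message seen in the ERROR block above.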
[ 3288.173555] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7af48924-61c4-4ad7-8665-450297e76d90 tempest-AttachVolumeShelveTestJSON-2029598495 tempest-AttachVolumeShelveTestJSON-2029598495-project-member] Lock "a50141eb-d189-4970-9adc-10a25409b99a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.161s {{(pid=61964) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 3288.475014] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688898, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107273} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3288.475203] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3288.475381] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/0a39ea25-fe66-4db2-9c78-96208a9e2380" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3288.475597] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3288.475716] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3288.476078] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3288.476306] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f605eede-c00a-48d0-b4c8-189b9f4ba742 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3288.480757] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3288.480757] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52824656-2339-20d5-0603-9e465024e6f2" [ 3288.480757] env[61964]: _type = "Task" [ 3288.480757] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3288.488190] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52824656-2339-20d5-0603-9e465024e6f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3288.990879] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52824656-2339-20d5-0603-9e465024e6f2, 'name': SearchDatastore_Task, 'duration_secs': 0.013207} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3288.991246] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113/ts-2024-11-29-16-10-47 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3288.991530] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ffcc320-7bb9-460e-92da-5ce65d348138 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3289.003536] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113/ts-2024-11-29-16-10-47 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3289.003689] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113 is no longer used by this node. Pending deletion! [ 3289.003840] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3289.004055] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ae463e16-6e05-47c7-b56a-58e779d3fd8e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3289.004175] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/ae463e16-6e05-47c7-b56a-58e779d3fd8e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3289.004487] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae463e16-6e05-47c7-b56a-58e779d3fd8e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3289.004716] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1002612-63be-4c19-9f1f-b42238444670 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3289.008599] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3289.008599] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529bff99-1011-f883-8f95-0e68e18be78d" [ 3289.008599] env[61964]: _type = "Task" [ 3289.008599] env[61964]: } to complete. 
{{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3289.016010] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529bff99-1011-f883-8f95-0e68e18be78d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3289.518923] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]529bff99-1011-f883-8f95-0e68e18be78d, 'name': SearchDatastore_Task, 'duration_secs': 0.009562} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3289.519261] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/ae463e16-6e05-47c7-b56a-58e779d3fd8e is no longer used. Deleting! [ 3289.519401] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/ae463e16-6e05-47c7-b56a-58e779d3fd8e {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3289.519656] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60a5306a-486b-4b1c-bda3-9952b9e175fd {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3289.525454] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3289.525454] env[61964]: value = "task-1688899" [ 3289.525454] env[61964]: _type = "Task" [ 3289.525454] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3289.532667] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688899, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3290.035073] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688899, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098419} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3290.035383] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3290.035463] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/ae463e16-6e05-47c7-b56a-58e779d3fd8e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3290.035673] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6ae83567-8c2d-4617-8576-39cf44491ff8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3290.035790] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/6ae83567-8c2d-4617-8576-39cf44491ff8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3290.036116] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6ae83567-8c2d-4617-8576-39cf44491ff8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3290.036376] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8cf652d-1e52-4a06-bbd4-5dc46d8eed12 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3290.040604] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3290.040604] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52623754-4881-bce7-51af-976192a19ed6" [ 3290.040604] env[61964]: _type = "Task" [ 3290.040604] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3290.047871] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52623754-4881-bce7-51af-976192a19ed6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3290.552011] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52623754-4881-bce7-51af-976192a19ed6, 'name': SearchDatastore_Task, 'duration_secs': 0.00797} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3290.552359] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/6ae83567-8c2d-4617-8576-39cf44491ff8 is no longer used. Deleting! 
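The entries before and after this point are the image cache manager (nova.virt.vmwareapi.imagecache) sweeping devstack-image-cache_base on datastore1. For each cached image it runs a HostDatastoreBrowser SearchDatastore_Task and then either removes the folder with FileManager.DeleteDatastoreFile_Task ("is no longer used. Deleting!") or only records a ts- timestamp marker directory and reports "Pending deletion!". The sketch below captures that per-image decision in schematic form only; the callables and the rule used to choose between the two outcomes are assumptions for illustration, not Nova's actual aging policy.

    # Schematic per-image aging step; all callables are injected placeholders.
    from datetime import datetime

    CACHE_PREFIX = "[datastore1] devstack-image-cache_base"

    def age_cached_image(image_id, is_image_in_use, has_timestamp_folder,
                         delete_datastore_folder, touch_timestamp_folder):
        folder = f"{CACHE_PREFIX}/{image_id}"
        if is_image_in_use(image_id):
            return "in use, kept"
        if has_timestamp_folder(folder):
            # Assumed policy: already marked unused on an earlier pass,
            # so remove the whole folder (DeleteDatastoreFile_Task).
            delete_datastore_folder(folder)
            return "no longer used, deleted"
        # Otherwise only drop a marker directory, e.g. .../ts-2024-11-29-16-10-47,
        # and leave the image pending deletion for a later pass.
        ts = datetime.utcnow().strftime("ts-%Y-%m-%d-%H-%M-%S")
        touch_timestamp_folder(f"{folder}/{ts}")     # FileManager.MakeDirectory
        return "pending deletion"

    if __name__ == "__main__":
        print(age_cached_image(
            "b1f1d4fc-4128-4a9a-81d4-3ad50d6d7113",
            is_image_in_use=lambda _id: False,
            has_timestamp_folder=lambda _f: False,
            delete_datastore_folder=lambda _f: None,
            touch_timestamp_folder=lambda f: print("mkdir", f),
        ))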
[ 3290.552555] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6ae83567-8c2d-4617-8576-39cf44491ff8 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3290.552830] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f131e1f-189e-49d8-abde-4e955951c1d1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3290.558435] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3290.558435] env[61964]: value = "task-1688900" [ 3290.558435] env[61964]: _type = "Task" [ 3290.558435] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3290.566018] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688900, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3291.069150] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134644} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3291.069476] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3291.069476] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/6ae83567-8c2d-4617-8576-39cf44491ff8" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3291.069703] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d70270c4-ccc2-4172-96f0-d841393c7e6d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3291.069837] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/d70270c4-ccc2-4172-96f0-d841393c7e6d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3291.070196] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d70270c4-ccc2-4172-96f0-d841393c7e6d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3291.070467] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e92f547f-0f1c-4314-8eae-d22926985266 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3291.074777] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3291.074777] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5289563d-2ded-c9cb-84d9-7fcf91902dbb" [ 3291.074777] env[61964]: _type = "Task" [ 3291.074777] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3291.082130] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5289563d-2ded-c9cb-84d9-7fcf91902dbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3291.586054] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5289563d-2ded-c9cb-84d9-7fcf91902dbb, 'name': SearchDatastore_Task, 'duration_secs': 0.013067} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3291.586054] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/d70270c4-ccc2-4172-96f0-d841393c7e6d is no longer used. Deleting! [ 3291.586054] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d70270c4-ccc2-4172-96f0-d841393c7e6d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3291.586346] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f31f02f9-3aa1-4533-a434-7cd7701dbde2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3291.591913] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3291.591913] env[61964]: value = "task-1688901" [ 3291.591913] env[61964]: _type = "Task" [ 3291.591913] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3291.599227] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688901, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3292.101224] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113322} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3292.101598] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3292.101598] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/d70270c4-ccc2-4172-96f0-d841393c7e6d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3292.101811] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/934a367b-7da7-4ed3-b14e-33ce62ac9478" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3292.101944] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/934a367b-7da7-4ed3-b14e-33ce62ac9478" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3292.102292] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/934a367b-7da7-4ed3-b14e-33ce62ac9478" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3292.102563] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-270a2edd-4a1f-4f4b-bc6c-31003efbd641 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3292.106753] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3292.106753] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52700801-37a3-d22a-cfae-0320d8756413" [ 3292.106753] env[61964]: _type = "Task" [ 3292.106753] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3292.114377] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52700801-37a3-d22a-cfae-0320d8756413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3292.617408] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52700801-37a3-d22a-cfae-0320d8756413, 'name': SearchDatastore_Task, 'duration_secs': 0.010479} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3292.617712] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/934a367b-7da7-4ed3-b14e-33ce62ac9478 is no longer used. Deleting! 
[ 3292.617855] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/934a367b-7da7-4ed3-b14e-33ce62ac9478 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3292.618130] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db2c58e9-21af-48ec-adba-2bc50479a386 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3292.624313] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3292.624313] env[61964]: value = "task-1688902" [ 3292.624313] env[61964]: _type = "Task" [ 3292.624313] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3292.632128] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3293.134568] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119034} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3293.134931] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3293.134931] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/934a367b-7da7-4ed3-b14e-33ce62ac9478" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3293.135172] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3293.135287] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3293.135607] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3293.135865] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa7dd423-36a0-492e-8653-3a9d22289457 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3293.140183] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3293.140183] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52beaae4-526d-9ab6-28e0-0ae109877cce" [ 3293.140183] env[61964]: _type = "Task" [ 3293.140183] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3293.147541] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52beaae4-526d-9ab6-28e0-0ae109877cce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3293.650652] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52beaae4-526d-9ab6-28e0-0ae109877cce, 'name': SearchDatastore_Task, 'duration_secs': 0.008543} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3293.651625] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556/ts-2024-11-29-16-10-52 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3293.651625] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-157ff7f3-b93c-4140-b1df-86b142c7eff0 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3293.663196] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556/ts-2024-11-29-16-10-52 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3293.663335] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image 366265c2-ce0d-4c0a-8836-4cfc6f81c556 is no longer used by this node. Pending deletion! 
[ 3293.663492] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3293.663697] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5d4f4e5a-fde2-4694-ac11-664cf08dab3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3293.663824] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/5d4f4e5a-fde2-4694-ac11-664cf08dab3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3293.664150] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5d4f4e5a-fde2-4694-ac11-664cf08dab3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3293.664417] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03170a5c-4cdc-413c-9181-46887af968a5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3293.668338] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3293.668338] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52372893-f6d6-361f-7a2f-6107081cb46e" [ 3293.668338] env[61964]: _type = "Task" [ 3293.668338] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3293.675816] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52372893-f6d6-361f-7a2f-6107081cb46e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3294.178868] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52372893-f6d6-361f-7a2f-6107081cb46e, 'name': SearchDatastore_Task, 'duration_secs': 0.008369} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3294.179201] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/5d4f4e5a-fde2-4694-ac11-664cf08dab3f is no longer used. Deleting! 
[ 3294.179329] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/5d4f4e5a-fde2-4694-ac11-664cf08dab3f {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3294.179581] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8c395fc-e008-4477-853a-a463562f5a5f {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3294.185476] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3294.185476] env[61964]: value = "task-1688903" [ 3294.185476] env[61964]: _type = "Task" [ 3294.185476] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3294.193569] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3294.695268] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140199} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3294.695442] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3294.695606] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/5d4f4e5a-fde2-4694-ac11-664cf08dab3f" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3294.695841] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/182baca5-1768-4c2b-ba0a-c289429af6f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3294.695984] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/182baca5-1768-4c2b-ba0a-c289429af6f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3294.696295] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/182baca5-1768-4c2b-ba0a-c289429af6f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3294.696563] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5e581d3-40dc-4bcd-9749-a951e1f7144d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3294.700839] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3294.700839] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e2cd57-a6df-dccb-b463-3152c17e78a0" [ 3294.700839] env[61964]: _type = "Task" [ 3294.700839] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3294.708247] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e2cd57-a6df-dccb-b463-3152c17e78a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3295.212020] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e2cd57-a6df-dccb-b463-3152c17e78a0, 'name': SearchDatastore_Task, 'duration_secs': 0.010572} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3295.212374] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/182baca5-1768-4c2b-ba0a-c289429af6f5 is no longer used. Deleting! [ 3295.212499] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/182baca5-1768-4c2b-ba0a-c289429af6f5 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3295.212750] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8638ab34-112d-4e52-aedc-306a6dff9be3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3295.218491] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3295.218491] env[61964]: value = "task-1688904" [ 3295.218491] env[61964]: _type = "Task" [ 3295.218491] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3295.225945] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688904, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3295.728537] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10878} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3295.728750] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3295.728877] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/182baca5-1768-4c2b-ba0a-c289429af6f5" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3295.729109] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3295.729256] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3295.729565] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3295.729834] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbb59399-13e5-4629-b919-c8749b3b50f4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3295.734137] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3295.734137] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52401182-5be2-7d33-dc83-bf3056ec75f6" [ 3295.734137] env[61964]: _type = "Task" [ 3295.734137] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3295.741651] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52401182-5be2-7d33-dc83-bf3056ec75f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3296.245800] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52401182-5be2-7d33-dc83-bf3056ec75f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010099} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3296.246111] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99 is no longer used. Deleting! 
[ 3296.246294] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3296.247028] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1e0c1ab-8b44-4498-98b1-ae57a2d8b608 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3296.253107] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3296.253107] env[61964]: value = "task-1688905" [ 3296.253107] env[61964]: _type = "Task" [ 3296.253107] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3296.261272] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688905, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3296.763531] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093559} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3296.763750] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3296.764399] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3296.764648] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3296.764766] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3296.765092] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3296.765382] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e8aad1e-3611-4958-ab7d-448caa3fff06 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3296.769737] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3296.769737] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5247c7ee-f80a-ece6-79ab-6926798fd07e" [ 3296.769737] env[61964]: _type = "Task" [ 3296.769737] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3296.777354] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5247c7ee-f80a-ece6-79ab-6926798fd07e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3297.280776] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5247c7ee-f80a-ece6-79ab-6926798fd07e, 'name': SearchDatastore_Task, 'duration_secs': 0.00799} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3297.281136] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee/ts-2024-11-29-16-10-56 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3297.281325] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a12e3ef-b280-48cd-a69d-3f5a85f40eef {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3297.292859] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee/ts-2024-11-29-16-10-56 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3297.293026] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee is no longer used by this node. Pending deletion! 
[ 3297.293214] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3297.293433] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a34ca895-1b71-4013-a91f-e2b3876aa487" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3297.293550] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/a34ca895-1b71-4013-a91f-e2b3876aa487" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3297.293869] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a34ca895-1b71-4013-a91f-e2b3876aa487" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3297.294127] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a34ca512-5332-45e1-b821-11e21608ae00 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3297.298139] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3297.298139] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f4ef97-fc33-9658-40ec-c6d1b2b56233" [ 3297.298139] env[61964]: _type = "Task" [ 3297.298139] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3297.305723] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f4ef97-fc33-9658-40ec-c6d1b2b56233, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3297.808884] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52f4ef97-fc33-9658-40ec-c6d1b2b56233, 'name': SearchDatastore_Task, 'duration_secs': 0.007766} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3297.809201] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/a34ca895-1b71-4013-a91f-e2b3876aa487 is no longer used. Deleting! 
[ 3297.809345] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a34ca895-1b71-4013-a91f-e2b3876aa487 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3297.809605] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4962852a-43ec-4fd6-9eaa-d547fa496311 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3297.815286] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3297.815286] env[61964]: value = "task-1688906" [ 3297.815286] env[61964]: _type = "Task" [ 3297.815286] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3297.822565] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3298.325367] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091215} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3298.325722] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3298.325878] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/a34ca895-1b71-4013-a91f-e2b3876aa487" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3298.326125] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3b18b0bb-94e4-485b-99be-1c73c27df469" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3298.326247] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/3b18b0bb-94e4-485b-99be-1c73c27df469" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3298.326629] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3b18b0bb-94e4-485b-99be-1c73c27df469" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3298.326933] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f574657-f1a3-4568-b7ea-73ddfcf756bc {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3298.331127] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3298.331127] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527c5d99-5b9f-5724-ff77-a052db96803a" [ 3298.331127] env[61964]: _type = "Task" [ 3298.331127] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3298.339334] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527c5d99-5b9f-5724-ff77-a052db96803a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3298.841430] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]527c5d99-5b9f-5724-ff77-a052db96803a, 'name': SearchDatastore_Task, 'duration_secs': 0.009223} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3298.841741] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/3b18b0bb-94e4-485b-99be-1c73c27df469 is no longer used. Deleting! [ 3298.841886] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3b18b0bb-94e4-485b-99be-1c73c27df469 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3298.842199] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a706695-78dc-44f2-872a-45b3d5b216a3 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3298.848446] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3298.848446] env[61964]: value = "task-1688907" [ 3298.848446] env[61964]: _type = "Task" [ 3298.848446] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3298.856170] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688907, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3299.358485] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688907, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107702} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3299.358844] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3299.359069] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/3b18b0bb-94e4-485b-99be-1c73c27df469" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3299.359315] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2be3432d-4187-4953-8639-792fd1e9c8e7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3299.359436] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/2be3432d-4187-4953-8639-792fd1e9c8e7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3299.359742] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2be3432d-4187-4953-8639-792fd1e9c8e7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3299.360037] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98042fd-ca1a-4a84-a6a6-aa573582af18 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3299.364277] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3299.364277] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522dc28c-13fb-e2ad-e6a6-5cd483da1738" [ 3299.364277] env[61964]: _type = "Task" [ 3299.364277] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3299.371626] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522dc28c-13fb-e2ad-e6a6-5cd483da1738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3299.874879] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]522dc28c-13fb-e2ad-e6a6-5cd483da1738, 'name': SearchDatastore_Task, 'duration_secs': 0.008822} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3299.875211] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/2be3432d-4187-4953-8639-792fd1e9c8e7 is no longer used. Deleting! 
[ 3299.875359] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/2be3432d-4187-4953-8639-792fd1e9c8e7 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3299.875616] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfb169d0-ac0c-4812-afea-da7c36ff6213 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3299.882203] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3299.882203] env[61964]: value = "task-1688908" [ 3299.882203] env[61964]: _type = "Task" [ 3299.882203] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3299.889626] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3300.393043] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146305} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3300.393043] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3300.393043] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/2be3432d-4187-4953-8639-792fd1e9c8e7" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3300.393043] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/cc67f98c-86dc-4330-87be-7e371f1f586d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3300.393043] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/cc67f98c-86dc-4330-87be-7e371f1f586d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3300.393519] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc67f98c-86dc-4330-87be-7e371f1f586d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3300.393557] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b95a650e-350d-4f60-accb-2639c321ffa9 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3300.397855] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3300.397855] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9a9f1-29a0-ed93-11d4-ac4f1ea9cdce" [ 3300.397855] env[61964]: _type = "Task" [ 3300.397855] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3300.405423] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9a9f1-29a0-ed93-11d4-ac4f1ea9cdce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3300.908193] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52e9a9f1-29a0-ed93-11d4-ac4f1ea9cdce, 'name': SearchDatastore_Task, 'duration_secs': 0.009239} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3300.908508] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/cc67f98c-86dc-4330-87be-7e371f1f586d is no longer used. Deleting! [ 3300.908653] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/cc67f98c-86dc-4330-87be-7e371f1f586d {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3300.908918] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f973127-db9d-4b72-9d4f-29163cc8798d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3300.917194] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3300.917194] env[61964]: value = "task-1688909" [ 3300.917194] env[61964]: _type = "Task" [ 3300.917194] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3300.923451] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688909, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3301.425061] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688909, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107401} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3301.425428] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3301.425490] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/cc67f98c-86dc-4330-87be-7e371f1f586d" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3301.425665] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/cc0de5fe-10c0-4314-9932-5f1d62182f5a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3301.425780] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/cc0de5fe-10c0-4314-9932-5f1d62182f5a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3301.426159] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc0de5fe-10c0-4314-9932-5f1d62182f5a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3301.426433] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-002e30e8-69b4-4fa2-b152-2b7202cbfbf4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3301.430780] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3301.430780] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d50fe8-53ca-75c3-3c63-d6de825d3e5f" [ 3301.430780] env[61964]: _type = "Task" [ 3301.430780] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3301.438019] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d50fe8-53ca-75c3-3c63-d6de825d3e5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3301.941361] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52d50fe8-53ca-75c3-3c63-d6de825d3e5f, 'name': SearchDatastore_Task, 'duration_secs': 0.013986} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3301.941674] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/cc0de5fe-10c0-4314-9932-5f1d62182f5a is no longer used. Deleting! 
[ 3301.941819] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/cc0de5fe-10c0-4314-9932-5f1d62182f5a {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3301.942119] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce5b8c96-c07f-4f27-9c8e-3c080591d6b4 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3301.947918] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3301.947918] env[61964]: value = "task-1688910" [ 3301.947918] env[61964]: _type = "Task" [ 3301.947918] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3301.955111] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3302.458134] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106389} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3302.458481] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3302.458522] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/cc0de5fe-10c0-4314-9932-5f1d62182f5a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3302.458718] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/0dc5ea75-0951-4265-b5ec-b8bf9f8fc3b1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3302.458845] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/0dc5ea75-0951-4265-b5ec-b8bf9f8fc3b1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3302.459191] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/0dc5ea75-0951-4265-b5ec-b8bf9f8fc3b1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3302.459459] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e97fcabd-d931-4774-9aab-ed60bc119f3d {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3302.463633] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3302.463633] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52de0a6d-c394-fbb1-cf7f-b71b78f07b9f" [ 3302.463633] env[61964]: _type = "Task" [ 3302.463633] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3302.471479] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52de0a6d-c394-fbb1-cf7f-b71b78f07b9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3302.983601] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52de0a6d-c394-fbb1-cf7f-b71b78f07b9f, 'name': SearchDatastore_Task, 'duration_secs': 0.012931} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3302.983936] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/0dc5ea75-0951-4265-b5ec-b8bf9f8fc3b1 is no longer used. Deleting! [ 3302.984104] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/0dc5ea75-0951-4265-b5ec-b8bf9f8fc3b1 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3302.984362] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e88f655-636c-4214-896e-8c74ad56c993 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3302.992887] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3302.992887] env[61964]: value = "task-1688911" [ 3302.992887] env[61964]: _type = "Task" [ 3302.992887] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3303.002418] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3303.502430] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110736} completed successfully. 
{{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3303.503173] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3303.503501] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/0dc5ea75-0951-4265-b5ec-b8bf9f8fc3b1" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3303.503864] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d99c9423-0691-47a0-aa23-8724a8a60b6b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3303.504151] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/d99c9423-0691-47a0-aa23-8724a8a60b6b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3303.506025] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d99c9423-0691-47a0-aa23-8724a8a60b6b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3303.506025] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-029afe83-1390-440c-b1d1-323ec2897fd1 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3303.509186] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3303.509186] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5216fb46-c057-984f-a3e8-2da3aae37209" [ 3303.509186] env[61964]: _type = "Task" [ 3303.509186] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3303.516475] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5216fb46-c057-984f-a3e8-2da3aae37209, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3304.022447] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5216fb46-c057-984f-a3e8-2da3aae37209, 'name': SearchDatastore_Task, 'duration_secs': 0.008962} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3304.022447] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/d99c9423-0691-47a0-aa23-8724a8a60b6b is no longer used. Deleting! 
[ 3304.022447] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d99c9423-0691-47a0-aa23-8724a8a60b6b {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3304.022447] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f77254b3-3d24-4caa-b6b1-1e625f5ed2d5 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3304.029030] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3304.029030] env[61964]: value = "task-1688912" [ 3304.029030] env[61964]: _type = "Task" [ 3304.029030] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3304.034534] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688912, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3304.537121] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102776} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3304.537832] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3304.538164] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/d99c9423-0691-47a0-aa23-8724a8a60b6b" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3304.538514] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3304.538783] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3304.539294] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3304.541026] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-072a0ef4-f170-49fa-b452-6a9c39ac7353 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3304.544208] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3304.544208] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5261d2e0-9ad2-0c1f-c951-eee9f481613b" [ 3304.544208] env[61964]: _type = "Task" [ 3304.544208] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3304.551738] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5261d2e0-9ad2-0c1f-c951-eee9f481613b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3305.056041] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]5261d2e0-9ad2-0c1f-c951-eee9f481613b, 'name': SearchDatastore_Task, 'duration_secs': 0.008064} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3305.056041] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Creating directory with path [datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e/ts-2024-11-29-16-11-03 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3305.056041] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7178118b-687e-4052-b86f-68fd76e4e5ca {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3305.068447] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Created directory with path [datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e/ts-2024-11-29-16-11-03 {{(pid=61964) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3305.068669] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image e85853cc-33ff-4df6-aa9d-eb02e938717e is no longer used by this node. Pending deletion! 
[ 3305.068904] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3305.069186] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/335f42ad-f9c3-44e8-8628-463c7a5cf308" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3305.069368] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/335f42ad-f9c3-44e8-8628-463c7a5cf308" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3305.069725] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/335f42ad-f9c3-44e8-8628-463c7a5cf308" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3305.070041] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e23b6268-6868-4092-8d9e-27c44a233e54 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3305.074101] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3305.074101] env[61964]: value = "session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52002360-87d1-e273-f2cd-a2c20f7e0768" [ 3305.074101] env[61964]: _type = "Task" [ 3305.074101] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3305.081100] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52002360-87d1-e273-f2cd-a2c20f7e0768, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3305.585538] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52002360-87d1-e273-f2cd-a2c20f7e0768, 'name': SearchDatastore_Task, 'duration_secs': 0.009042} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3305.586062] env[61964]: INFO nova.virt.vmwareapi.imagecache [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Image [datastore1] devstack-image-cache_base/335f42ad-f9c3-44e8-8628-463c7a5cf308 is no longer used. Deleting! 
[ 3305.586215] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/335f42ad-f9c3-44e8-8628-463c7a5cf308 {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3305.586478] env[61964]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eedb42d4-7425-42d4-9404-84c154c85928 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3305.592396] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3305.592396] env[61964]: value = "task-1688913" [ 3305.592396] env[61964]: _type = "Task" [ 3305.592396] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3305.601067] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688913, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3306.103031] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': task-1688913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116077} completed successfully. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3306.103308] env[61964]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Deleted the datastore file {{(pid=61964) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3306.103492] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Releasing lock "[datastore1] devstack-image-cache_base/335f42ad-f9c3-44e8-8628-463c7a5cf308" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 3306.104193] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquiring lock "[datastore1] devstack-image-cache_base/676770fd-0fa6-4b9c-bbd5-59d3cb15196a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 3306.104193] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired lock "[datastore1] devstack-image-cache_base/676770fd-0fa6-4b9c-bbd5-59d3cb15196a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 3306.104370] env[61964]: DEBUG oslo_concurrency.lockutils [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/676770fd-0fa6-4b9c-bbd5-59d3cb15196a" {{(pid=61964) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 3306.104551] env[61964]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f967d33-0291-4155-88f4-65ac572508a2 {{(pid=61964) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3306.108911] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Waiting for the task: (returnval){ [ 3306.108911] env[61964]: value = 
"session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52efc2b6-3c45-fba4-6d1a-55e0716e8c86" [ 3306.108911] env[61964]: _type = "Task" [ 3306.108911] env[61964]: } to complete. {{(pid=61964) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3306.117828] env[61964]: DEBUG oslo_vmware.api [None req-7bca140e-493d-4da3-967c-013648a1ebe0 None None] Task: {'id': session[52a1013f-8fc6-22ee-80bf-2fc230e0d749]52efc2b6-3c45-fba4-6d1a-55e0716e8c86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61964) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}